ansible-playbook [core 2.17.7] config file = None configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /usr/local/lib/python3.12/site-packages/ansible ansible collection location = /tmp/collections-wP6 executable location = /usr/local/bin/ansible-playbook python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12) jinja version = 3.1.4 libyaml = True No config file found; using defaults running playbook inside collection fedora.linux_system_roles redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_quadlet_demo.yml *********************************************** 2 plays in /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5 Saturday 21 December 2024 11:32:50 -0500 (0:00:00.007) 0:00:00.007 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-v2f/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Deploy the quadlet demo app] ********************************************* TASK [Gathering Facts] ********************************************************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9 Saturday 21 December 2024 11:32:50 -0500 (0:00:00.018) 0:00:00.025 ***** [WARNING]: Platform linux on host managed-node2 is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible- core/2.17/reference_appendices/interpreter_discovery.html for more information. 
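Note on the "Include vault variables" task above: the two values it loads (__podman_test_password and mysql_container_root_password) come from tests/vars/vault-variables.yml as vault-encrypted strings. A minimal sketch of how such a vars file is typically laid out, assuming the values were produced with `ansible-vault encrypt_string` (hypothetical layout, ciphertext shortened, not the real file):

# vault-variables.yml (hypothetical layout; ciphertext truncated)
__podman_test_password: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  35383939616163653333633431363463...
mysql_container_root_password: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  61333932373230333539663035366431...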
ok: [managed-node2] TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38 Saturday 21 December 2024 11:32:52 -0500 (0:00:01.490) 0:00:01.515 ***** skipping: [managed-node2] => { "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\"" } TASK [End test] **************************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45 Saturday 21 December 2024 11:32:52 -0500 (0:00:00.016) 0:00:01.532 ***** META: end_play conditional evaluated to False, continuing play skipping: [managed-node2] => { "skip_reason": "end_play conditional evaluated to False, continuing play" } MSG: end_play TASK [Generate certificates] *************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51 Saturday 21 December 2024 11:32:52 -0500 (0:00:00.011) 0:00:01.543 ***** included: fedora.linux_system_roles.certificate for managed-node2 TASK [fedora.linux_system_roles.certificate : Set version specific variables] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2 Saturday 21 December 2024 11:32:52 -0500 (0:00:00.047) 0:00:01.591 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2 Saturday 21 December 2024 11:32:52 -0500 (0:00:00.037) 0:00:01.628 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Check if system is ostree] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10 Saturday 21 December 2024 11:32:52 -0500 (0:00:00.057) 0:00:01.685 ***** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15 Saturday 21 December 2024 11:32:53 -0500 (0:00:00.529) 0:00:02.215 ***** ok: [managed-node2] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19 Saturday 21 December 2024 11:32:53 -0500 (0:00:00.023) 0:00:02.238 ***** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { 
"__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Saturday 21 December 2024 11:32:53 -0500 (0:00:00.045) 0:00:02.283 ***** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-cffi-1.16.0-7.el10.x86_64", "Installed: python3-pyasn1-0.6.1-1.el10.noarch", "Installed: python3-cryptography-43.0.0-4.el10.x86_64", "Installed: python3-ply-3.11-25.el10.noarch", "Installed: python3-pycparser-2.20-16.el10.noarch" ] } lsrpackages: python3-cryptography python3-dbus python3-pyasn1 TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 Saturday 21 December 2024 11:32:55 -0500 (0:00:02.664) 0:00:04.948 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: nss-sysinit-3.101.0-13.el10.x86_64", "Installed: nss-util-3.101.0-13.el10.x86_64", "Installed: certmonger-0.79.20-3.el10.x86_64", "Installed: python3-packaging-23.2-6.el10.noarch", "Installed: dbus-tools-1:1.14.10-5.el10.x86_64", "Installed: nspr-4.35.0-34.el10.x86_64", "Installed: nss-3.101.0-13.el10.x86_64", "Installed: nss-softokn-3.101.0-13.el10.x86_64", "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64" ] } lsrpackages: certmonger python3-packaging TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35 Saturday 21 December 2024 11:32:58 -0500 (0:00:02.986) 0:00:07.935 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61 Saturday 21 December 2024 11:32:59 -0500 (0:00:00.642) 0:00:08.578 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure 
provider service is running] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 Saturday 21 December 2024 11:32:59 -0500 (0:00:00.464) 0:00:09.042 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network.target sysinit.target syslog.target dbus.socket system.slice dbus-broker.service basic.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": 
"none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3123519488", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/certmonger.pid", "PartOf": "dbus-broker.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", 
"ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 Saturday 21 December 2024 11:33:01 -0500 (0:00:01.124) 0:00:10.166 ***** changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate requested (new). 
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 Saturday 21 December 2024 11:33:01 -0500 (0:00:00.910) 0:00:11.077 ***** ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQU9xVVU0U3V4azZXc1prYVlMZ2RjOEF3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJWaApPVFExTXpnMExXRmxZelkwWlRrMkxXSXhPVGt4WVRZd0xXSTRNV1EzTTJKbU1CNFhEVEkwTVRJeU1URTJNek13Ck1Wb1hEVEkxTVRJeU1URTJNek13TVZvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4aDhudC8wSHNuQVRWQm5rZHE2TXpUSm5JaStjL0kySgpMbUJoZXNlTldLV0hhTy9UMG8wSS9oM0prRm9waXQzM3dYdDlNVkN1VXhBa2ljQWgrNzJiYVk5U00xNWJlczFpCjBLQjh0cnFramQ0VHJpL1ZFR25vd2c3TG5ESi95SHBNb0pZelFJMXFEQTBoOEQxVFVsd2sxQVdvb214VTJhNEUKMEhFdm5QYVZDT29Dc00wZnNEMnBoakhoTG12Z0ZXMVF6V3BLQzJ6dFVVeWZpUGJYWDk0bzRTSGV0cVJmRVM2cgpPUEwzdG90MnhYazZOcXBUVnMrMDVlV2g3cjlPVGs3d2xwSGZ3YWxHNlVUK3pCOWNKMC9YcFlnZEN4dVJNckRYCjE1cXVydGF0WkR6ZVlhb1liWEdRYnAwbVhIV1hzTVJxVlpPQmhiOWoyQnZIN0pKS1RCamdYd0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGS1NNCm9XTmV2WjY3S2RzRHpYRkNJVWNkUlhaaE1COEdBMVVkSXdRWU1CYUFGSVVjMEZCd0dkcHRiNE4wV2tOSUJyamkKUWZQM01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQjZpOEFYRWFSUFpUWklxRi9MazBSL2RIRjFqbjZ3R1YybgpOVWlHK1FWMWdqZSs1VW1Kbitrc2E1LysxbmtCTnJQWEdhNWJzOWpPNS9NS3RkbXBESzhKY3R1Z1RhZ2MvTkJKCi93OSs0RWdpSVlGRnN6b0RnZCtpZ3dlaUlRREhIOE1XR0FwT1ZOckRjRVhPYXMrdjVSUFd1enJkZ0dXWGhsbysKeExvWWtvN2luazF2T1E0RW1hZmFYeHJHMEwwcWRLeThmRjFVWmtaVVlnZlViTGRPV0NZS3dPWkRNNXI4VWJ3UwoxWHZzOGJVaG5uT2pReE5KVTRwdW05cnAyN0lIa1E3UlpPcGFhbzhNdm5XMHJKL3RsNWIySi9QRStZV2UwQWRwClNwcVRCS2VhZ1BoellidHNtUW9BY1dNSDVGaXR2WVMrOUNlRFJrdTdod1NSYlFFVTRiUmcKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2Z0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktnd2dnU2tBZ0VBQW9JQkFRREdIeWUzL1FleWNCTlUKR2VSMnJvek5NbWNpTDV6OGpZa3VZR0Y2eDQxWXBZZG83OVBTalFqK0hjbVFXaW1LM2ZmQmUzMHhVSzVURUNTSgp3Q0g3dlp0cGoxSXpYbHQ2eldMUW9IeTJ1cVNOM2hPdUw5VVFhZWpDRHN1Y01uL0lla3lnbGpOQWpXb01EU0h3ClBWTlNYQ1RVQmFpaWJGVFpyZ1RRY1MrYzlwVUk2Z0t3elIrd1BhbUdNZUV1YStBVmJWRE5ha29MYk8xUlRKK0kKOXRkZjNpamhJZDYycEY4UkxxczQ4dmUyaTNiRmVUbzJxbE5XejdUbDVhSHV2MDVPVHZDV2tkL0JxVWJwUlA3TQpIMXduVDllbGlCMExHNUV5c05mWG1xNnUxcTFrUE41aHFoaHRjWkJ1blNaY2RaZXd4R3BWazRHRnYyUFlHOGZzCmtrcE1HT0JmQWdNQkFBRUNnZ0VBQjVINkw3YThrV05rbHdLVVJHVlZYazZEai82c3dNa0hDYkxIYkJjWVBncGoKMEZmVm9BWjJrUUNDQ3F0L1NkM3R1OEs1c21XSVVRYUlERGEvWUZkSzRHK0lXYlozSFdGS3Iza1ZFREJmM1p5SQo0NlRTbElRa3o0RnhqZXJhRDNGK05idjNnbUJ2bHhLZ3VOckhCbThja3lObmpGUHRCcnoyVkhhaFlKM2F3bEtKCmJycW5ONXZaY2VBQnhQeDk5Z2pqdi9tV1lhdjlzMFMxSVl0aGozQk5md2szMXQ1MDNLWWl3NmRSN3hJeG8xSS8KcGpxL0Fsa1JEdmxqbGZPM3owdXhrT0txNmZiUHZvS2RMTFZ2ZVBXWGxoRnlCL1Y3QzhTbUpCUThkUHRlZ0tVSQpFT0hNby9ZQW1FaEo3bTRyd3J2RndiTVJtUWRKWWE5Q0hrRVVNdmdxQlFLQmdRRDdHS0hxZGRzOUZ5N2lPK3VhCjd3bXRKVENjemhCc2Rhcmxkc1BrYVpZd21aOFJwUXkzWDdCTE4wZmxPSTBNNjBoZ1RHelE4a0toK1lRbGhHMkMKSTZ0YXRaY2NWbXVJbmJDbVZNYzZZT3JHVHdCaUozOFdhT0M5dWwzYWdEMmNQWkVBSlFFOWFTNWxqTVh2Mi9odgpMRGQ5NTAxQnV2T3REbi9DeTdCMDFXZEpLd0tCZ1FESi9heU5DSmVDWTl0SUxhNnJzMWhVc3RiNHZtd1c0ekJxClpEUDE3WmF0SDN1eEdacXVhYVM4cnFIVUcvRktLTm5mdGRMKzZuYlFnUWN0VXFnSkdCRWpmaXJ6ZUd3OGpNeW4KdXJQTDNKc28yZm9xZTRYMFhYSlZLWjYxdzNIcUppZ2ZyRGtRclhqTVc5MFdGdm83MVBlOHkyeG16TVJZZTlEQgpzSkFTYS9DRG5RS0JnRVY4VTVKbWV2MDA1U3lHR0hVUG9VWGcvTXJuMjNPb1BGcXp1eElDaGlWODNhQXNaSkI2ClVuYitIVTg1anc3OExWTTQwd1FmaG9EOFk2MWVNRS9MUHlnakNHWTNKQnJ5U0dnL0hjTHgvd1pSNW5GRExOQXUKeGNLN1NDMm5QMlYxUkNNZUduRzhJYkxJZWg0ZWFXbWFldURFbVlTUEQ4L2NLMDdrdnpnVnRqZHhBb0dCQUxTTQpmelFCUGd4YTNhTUMwbUsvaUhFTStvS1ZFMkRSNm5EdTZGTW03OHdIbkRTTlFtb3NIQWhid0dNc0p0MjI3aWk5CkNjQzZLMEhzVTFINzFCOXMxOFQxL2NackdiUjExbWtHeGJldmxURkt4Z1k3SzExbU9kQk54NE9KRThYTUhZbFMKRkNBSUx6WGIxeDdyb3ZqZ1FDRU5ZL1B2aklIYkNwcFhERENyTXUweEFvR0JBSWszNDVrR1ViSG1wSkV3NnVZTwprV2NaRlk1OEVjZ3FQeXdsQWxZeTBJNjZQZWFzTTN3MnN4V2NnWVJjRlVtaWc4aE5tUzBvYVlpbll3bGczVUszCldjVFJ3WW5WQUMvbzg2OG1aMHNld0FiUXVLKy9oNklZYzk1VTJ6TzZ1bWpqdm1PcGVvY1dBNUZ0c0F6MDY4ZzQKNXZqdEJIekdaSVpHUGN2MUFlUUQxK202Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQU9xVVU0U3V4azZXc1prYVlMZ2RjOEF3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJWaApPVFExTXpnMExXRmxZelkwWlRrMkxXSXhPVGt4WVRZd0xXSTRNV1EzTTJKbU1CNFhEVEkwTVRJeU1URTJNek13Ck1Wb1hEVEkxTVRJeU1URTJNek13TVZvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF4aDhudC8wSHNuQVRWQm5rZHE2TXpUSm5JaStjL0kySgpMbUJoZXNlTldLV0hhTy9UMG8wSS9oM0prRm9waXQzM3dYdDlNVkN1VXhBa2ljQWgrNzJiYVk5U00xNWJlczFpCjBLQjh0cnFramQ0VHJpL1ZFR25vd2c3TG5ESi95SHBNb0pZelFJMXFEQTBoOEQxVFVsd2sxQVdvb214VTJhNEUKMEhFdm5QYVZDT29Dc00wZnNEMnBoakhoTG12Z0ZXMVF6V3BLQzJ6dFVVeWZpUGJYWDk0bzRTSGV0cVJmRVM2cgpPUEwzdG90MnhYazZOcXBUVnMrMDVlV2g3cjlPVGs3d2xwSGZ3YWxHNlVUK3pCOWNKMC9YcFlnZEN4dVJNckRYCjE1cXVydGF0WkR6ZVlhb1liWEdRYnAwbVhIV1hzTVJxVlpPQmhiOWoyQnZIN0pKS1RCamdYd0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGS1NNCm9XTmV2WjY3S2RzRHpYRkNJVWNkUlhaaE1COEdBMVVkSXdRWU1CYUFGSVVjMEZCd0dkcHRiNE4wV2tOSUJyamkKUWZQM01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQjZpOEFYRWFSUFpUWklxRi9MazBSL2RIRjFqbjZ3R1YybgpOVWlHK1FWMWdqZSs1VW1Kbitrc2E1LysxbmtCTnJQWEdhNWJzOWpPNS9NS3RkbXBESzhKY3R1Z1RhZ2MvTkJKCi93OSs0RWdpSVlGRnN6b0RnZCtpZ3dlaUlRREhIOE1XR0FwT1ZOckRjRVhPYXMrdjVSUFd1enJkZ0dXWGhsbysKeExvWWtvN2luazF2T1E0RW1hZmFYeHJHMEwwcWRLeThmRjFVWmtaVVlnZlViTGRPV0NZS3dPWkRNNXI4VWJ3UwoxWHZzOGJVaG5uT2pReE5KVTRwdW05cnAyN0lIa1E3UlpPcGFhbzhNdm5XMHJKL3RsNWIySi9QRStZV2UwQWRwClNwcVRCS2VhZ1BoellidHNtUW9BY1dNSDVGaXR2WVMrOUNlRFJrdTdod1NSYlFFVTRiUmcKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160 Saturday 21 December 2024 11:33:03 -0500 (0:00:01.211) 0:00:12.289 ***** ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAOqUU4Suxk6WsZkaYLgdc8AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Vh\nOTQ1Mzg0LWFlYzY0ZTk2LWIxOTkxYTYwLWI4MWQ3M2JmMB4XDTI0MTIyMTE2MzMw\nMVoXDTI1MTIyMTE2MzMwMVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxh8nt/0HsnATVBnkdq6MzTJnIi+c/I2J\nLmBheseNWKWHaO/T0o0I/h3JkFopit33wXt9MVCuUxAkicAh+72baY9SM15bes1i\n0KB8trqkjd4Tri/VEGnowg7LnDJ/yHpMoJYzQI1qDA0h8D1TUlwk1AWoomxU2a4E\n0HEvnPaVCOoCsM0fsD2phjHhLmvgFW1QzWpKC2ztUUyfiPbXX94o4SHetqRfES6r\nOPL3tot2xXk6NqpTVs+05eWh7r9OTk7wlpHfwalG6UT+zB9cJ0/XpYgdCxuRMrDX\n15qurtatZDzeYaoYbXGQbp0mXHWXsMRqVZOBhb9j2BvH7JJKTBjgXwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFKSM\noWNevZ67KdsDzXFCIUcdRXZhMB8GA1UdIwQYMBaAFIUc0FBwGdptb4N0WkNIBrji\nQfP3MA0GCSqGSIb3DQEBCwUAA4IBAQB6i8AXEaRPZTZIqF/Lk0R/dHF1jn6wGV2n\nNUiG+QV1gje+5UmJn+ksa5/+1nkBNrPXGa5bs9jO5/MKtdmpDK8JctugTagc/NBJ\n/w9+4EgiIYFFszoDgd+igweiIQDHH8MWGApOVNrDcEXOas+v5RPWuzrdgGWXhlo+\nxLoYko7ink1vOQ4EmafaXxrG0L0qdKy8fF1UZkZUYgfUbLdOWCYKwOZDM5r8UbwS\n1Xvs8bUhnnOjQxNJU4pum9rp27IHkQ7RZOpaao8MvnW0rJ/tl5b2J/PE+YWe0Adp\nSpqTBKeagPhzYbtsmQoAcWMH5FitvYS+9CeDRku7hwSRbQEU4bRg\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAOqUU4Suxk6WsZkaYLgdc8AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Vh\nOTQ1Mzg0LWFlYzY0ZTk2LWIxOTkxYTYwLWI4MWQ3M2JmMB4XDTI0MTIyMTE2MzMw\nMVoXDTI1MTIyMTE2MzMwMVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxh8nt/0HsnATVBnkdq6MzTJnIi+c/I2J\nLmBheseNWKWHaO/T0o0I/h3JkFopit33wXt9MVCuUxAkicAh+72baY9SM15bes1i\n0KB8trqkjd4Tri/VEGnowg7LnDJ/yHpMoJYzQI1qDA0h8D1TUlwk1AWoomxU2a4E\n0HEvnPaVCOoCsM0fsD2phjHhLmvgFW1QzWpKC2ztUUyfiPbXX94o4SHetqRfES6r\nOPL3tot2xXk6NqpTVs+05eWh7r9OTk7wlpHfwalG6UT+zB9cJ0/XpYgdCxuRMrDX\n15qurtatZDzeYaoYbXGQbp0mXHWXsMRqVZOBhb9j2BvH7JJKTBjgXwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFKSM\noWNevZ67KdsDzXFCIUcdRXZhMB8GA1UdIwQYMBaAFIUc0FBwGdptb4N0WkNIBrji\nQfP3MA0GCSqGSIb3DQEBCwUAA4IBAQB6i8AXEaRPZTZIqF/Lk0R/dHF1jn6wGV2n\nNUiG+QV1gje+5UmJn+ksa5/+1nkBNrPXGa5bs9jO5/MKtdmpDK8JctugTagc/NBJ\n/w9+4EgiIYFFszoDgd+igweiIQDHH8MWGApOVNrDcEXOas+v5RPWuzrdgGWXhlo+\nxLoYko7ink1vOQ4EmafaXxrG0L0qdKy8fF1UZkZUYgfUbLdOWCYKwOZDM5r8UbwS\n1Xvs8bUhnnOjQxNJU4pum9rp27IHkQ7RZOpaao8MvnW0rJ/tl5b2J/PE+YWe0Adp\nSpqTBKeagPhzYbtsmQoAcWMH5FitvYS+9CeDRku7hwSRbQEU4bRg\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDGHye3/QeycBNU\nGeR2rozNMmciL5z8jYkuYGF6x41YpYdo79PSjQj+HcmQWimK3ffBe30xUK5TECSJ\nwCH7vZtpj1IzXlt6zWLQoHy2uqSN3hOuL9UQaejCDsucMn/IekygljNAjWoMDSHw\nPVNSXCTUBaiibFTZrgTQcS+c9pUI6gKwzR+wPamGMeEua+AVbVDNakoLbO1RTJ+I\n9tdf3ijhId62pF8RLqs48ve2i3bFeTo2qlNWz7Tl5aHuv05OTvCWkd/BqUbpRP7M\nH1wnT9eliB0LG5EysNfXmq6u1q1kPN5hqhhtcZBunSZcdZewxGpVk4GFv2PYG8fs\nkkpMGOBfAgMBAAECggEAB5H6L7a8kWNklwKURGVVXk6Dj/6swMkHCbLHbBcYPgpj\n0FfVoAZ2kQCCCqt/Sd3tu8K5smWIUQaIDDa/YFdK4G+IWbZ3HWFKr3kVEDBf3ZyI\n46TSlIQkz4FxjeraD3F+Nbv3gmBvlxKguNrHBm8ckyNnjFPtBrz2VHahYJ3awlKJ\nbrqnN5vZceABxPx99gjjv/mWYav9s0S1IYthj3BNfwk31t503KYiw6dR7xIxo1I/\npjq/AlkRDvljlfO3z0uxkOKq6fbPvoKdLLVvePWXlhFyB/V7C8SmJBQ8dPtegKUI\nEOHMo/YAmEhJ7m4rwrvFwbMRmQdJYa9CHkEUMvgqBQKBgQD7GKHqdds9Fy7iO+ua\n7wmtJTCczhBsdarldsPkaZYwmZ8RpQy3X7BLN0flOI0M60hgTGzQ8kKh+YQlhG2C\nI6tatZccVmuInbCmVMc6YOrGTwBiJ38WaOC9ul3agD2cPZEAJQE9aS5ljMXv2/hv\nLDd9501BuvOtDn/Cy7B01WdJKwKBgQDJ/ayNCJeCY9tILa6rs1hUstb4vmwW4zBq\nZDP17ZatH3uxGZquaaS8rqHUG/FKKNnftdL+6nbQgQctUqgJGBEjfirzeGw8jMyn\nurPL3Jso2foqe4X0XXJVKZ61w3HqJigfrDkQrXjMW90WFvo71Pe8y2xmzMRYe9DB\nsJASa/CDnQKBgEV8U5Jmev005SyGGHUPoUXg/Mrn23OoPFqzuxIChiV83aAsZJB6\nUnb+HU85jw78LVM40wQfhoD8Y61eME/LPygjCGY3JBrySGg/HcLx/wZR5nFDLNAu\nxcK7SC2nP2V1RCMeGnG8IbLIeh4eaWmaeuDEmYSPD8/cK07kvzgVtjdxAoGBALSM\nfzQBPgxa3aMC0mK/iHEM+oKVE2DR6nDu6FMm78wHnDSNQmosHAhbwGMsJt227ii9\nCcC6K0HsU1H71B9s18T1/cZrGbR11mkGxbevlTFKxgY7K11mOdBNx4OJE8XMHYlS\nFCAILzXb1x7rovjgQCENY/PvjIHbCppXDDCrMu0xAoGBAIk345kGUbHmpJEw6uYO\nkWcZFY58EcgqPywlAlYy0I66PeasM3w2sxWcgYRcFUmig8hNmS0oaYinYwlg3UK3\nWcTRwYnVAC/o868mZ0sewAbQuK+/h6IYc95U2zO6umjjvmOpeocWA5FtsAz068g4\n5vjtBHzGZIZGPcv1AeQD1+m6\n-----END PRIVATE KEY-----\n" } } }, "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176 Saturday 21 December 2024 11:33:03 -0500 (0:00:00.070) 0:00:12.359 ***** ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAOqUU4Suxk6WsZkaYLgdc8AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Vh\nOTQ1Mzg0LWFlYzY0ZTk2LWIxOTkxYTYwLWI4MWQ3M2JmMB4XDTI0MTIyMTE2MzMw\nMVoXDTI1MTIyMTE2MzMwMVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxh8nt/0HsnATVBnkdq6MzTJnIi+c/I2J\nLmBheseNWKWHaO/T0o0I/h3JkFopit33wXt9MVCuUxAkicAh+72baY9SM15bes1i\n0KB8trqkjd4Tri/VEGnowg7LnDJ/yHpMoJYzQI1qDA0h8D1TUlwk1AWoomxU2a4E\n0HEvnPaVCOoCsM0fsD2phjHhLmvgFW1QzWpKC2ztUUyfiPbXX94o4SHetqRfES6r\nOPL3tot2xXk6NqpTVs+05eWh7r9OTk7wlpHfwalG6UT+zB9cJ0/XpYgdCxuRMrDX\n15qurtatZDzeYaoYbXGQbp0mXHWXsMRqVZOBhb9j2BvH7JJKTBjgXwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFKSM\noWNevZ67KdsDzXFCIUcdRXZhMB8GA1UdIwQYMBaAFIUc0FBwGdptb4N0WkNIBrji\nQfP3MA0GCSqGSIb3DQEBCwUAA4IBAQB6i8AXEaRPZTZIqF/Lk0R/dHF1jn6wGV2n\nNUiG+QV1gje+5UmJn+ksa5/+1nkBNrPXGa5bs9jO5/MKtdmpDK8JctugTagc/NBJ\n/w9+4EgiIYFFszoDgd+igweiIQDHH8MWGApOVNrDcEXOas+v5RPWuzrdgGWXhlo+\nxLoYko7ink1vOQ4EmafaXxrG0L0qdKy8fF1UZkZUYgfUbLdOWCYKwOZDM5r8UbwS\n1Xvs8bUhnnOjQxNJU4pum9rp27IHkQ7RZOpaao8MvnW0rJ/tl5b2J/PE+YWe0Adp\nSpqTBKeagPhzYbtsmQoAcWMH5FitvYS+9CeDRku7hwSRbQEU4bRg\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDGHye3/QeycBNU\nGeR2rozNMmciL5z8jYkuYGF6x41YpYdo79PSjQj+HcmQWimK3ffBe30xUK5TECSJ\nwCH7vZtpj1IzXlt6zWLQoHy2uqSN3hOuL9UQaejCDsucMn/IekygljNAjWoMDSHw\nPVNSXCTUBaiibFTZrgTQcS+c9pUI6gKwzR+wPamGMeEua+AVbVDNakoLbO1RTJ+I\n9tdf3ijhId62pF8RLqs48ve2i3bFeTo2qlNWz7Tl5aHuv05OTvCWkd/BqUbpRP7M\nH1wnT9eliB0LG5EysNfXmq6u1q1kPN5hqhhtcZBunSZcdZewxGpVk4GFv2PYG8fs\nkkpMGOBfAgMBAAECggEAB5H6L7a8kWNklwKURGVVXk6Dj/6swMkHCbLHbBcYPgpj\n0FfVoAZ2kQCCCqt/Sd3tu8K5smWIUQaIDDa/YFdK4G+IWbZ3HWFKr3kVEDBf3ZyI\n46TSlIQkz4FxjeraD3F+Nbv3gmBvlxKguNrHBm8ckyNnjFPtBrz2VHahYJ3awlKJ\nbrqnN5vZceABxPx99gjjv/mWYav9s0S1IYthj3BNfwk31t503KYiw6dR7xIxo1I/\npjq/AlkRDvljlfO3z0uxkOKq6fbPvoKdLLVvePWXlhFyB/V7C8SmJBQ8dPtegKUI\nEOHMo/YAmEhJ7m4rwrvFwbMRmQdJYa9CHkEUMvgqBQKBgQD7GKHqdds9Fy7iO+ua\n7wmtJTCczhBsdarldsPkaZYwmZ8RpQy3X7BLN0flOI0M60hgTGzQ8kKh+YQlhG2C\nI6tatZccVmuInbCmVMc6YOrGTwBiJ38WaOC9ul3agD2cPZEAJQE9aS5ljMXv2/hv\nLDd9501BuvOtDn/Cy7B01WdJKwKBgQDJ/ayNCJeCY9tILa6rs1hUstb4vmwW4zBq\nZDP17ZatH3uxGZquaaS8rqHUG/FKKNnftdL+6nbQgQctUqgJGBEjfirzeGw8jMyn\nurPL3Jso2foqe4X0XXJVKZ61w3HqJigfrDkQrXjMW90WFvo71Pe8y2xmzMRYe9DB\nsJASa/CDnQKBgEV8U5Jmev005SyGGHUPoUXg/Mrn23OoPFqzuxIChiV83aAsZJB6\nUnb+HU85jw78LVM40wQfhoD8Y61eME/LPygjCGY3JBrySGg/HcLx/wZR5nFDLNAu\nxcK7SC2nP2V1RCMeGnG8IbLIeh4eaWmaeuDEmYSPD8/cK07kvzgVtjdxAoGBALSM\nfzQBPgxa3aMC0mK/iHEM+oKVE2DR6nDu6FMm78wHnDSNQmosHAhbwGMsJt227ii9\nCcC6K0HsU1H71B9s18T1/cZrGbR11mkGxbevlTFKxgY7K11mOdBNx4OJE8XMHYlS\nFCAILzXb1x7rovjgQCENY/PvjIHbCppXDDCrMu0xAoGBAIk345kGUbHmpJEw6uYO\nkWcZFY58EcgqPywlAlYy0I66PeasM3w2sxWcgYRcFUmig8hNmS0oaYinYwlg3UK3\nWcTRwYnVAC/o868mZ0sewAbQuK+/h6IYc95U2zO6umjjvmOpeocWA5FtsAz068g4\n5vjtBHzGZIZGPcv1AeQD1+m6\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAOqUU4Suxk6WsZkaYLgdc8AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Vh\nOTQ1Mzg0LWFlYzY0ZTk2LWIxOTkxYTYwLWI4MWQ3M2JmMB4XDTI0MTIyMTE2MzMw\nMVoXDTI1MTIyMTE2MzMwMVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxh8nt/0HsnATVBnkdq6MzTJnIi+c/I2J\nLmBheseNWKWHaO/T0o0I/h3JkFopit33wXt9MVCuUxAkicAh+72baY9SM15bes1i\n0KB8trqkjd4Tri/VEGnowg7LnDJ/yHpMoJYzQI1qDA0h8D1TUlwk1AWoomxU2a4E\n0HEvnPaVCOoCsM0fsD2phjHhLmvgFW1QzWpKC2ztUUyfiPbXX94o4SHetqRfES6r\nOPL3tot2xXk6NqpTVs+05eWh7r9OTk7wlpHfwalG6UT+zB9cJ0/XpYgdCxuRMrDX\n15qurtatZDzeYaoYbXGQbp0mXHWXsMRqVZOBhb9j2BvH7JJKTBjgXwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFKSM\noWNevZ67KdsDzXFCIUcdRXZhMB8GA1UdIwQYMBaAFIUc0FBwGdptb4N0WkNIBrji\nQfP3MA0GCSqGSIb3DQEBCwUAA4IBAQB6i8AXEaRPZTZIqF/Lk0R/dHF1jn6wGV2n\nNUiG+QV1gje+5UmJn+ksa5/+1nkBNrPXGa5bs9jO5/MKtdmpDK8JctugTagc/NBJ\n/w9+4EgiIYFFszoDgd+igweiIQDHH8MWGApOVNrDcEXOas+v5RPWuzrdgGWXhlo+\nxLoYko7ink1vOQ4EmafaXxrG0L0qdKy8fF1UZkZUYgfUbLdOWCYKwOZDM5r8UbwS\n1Xvs8bUhnnOjQxNJU4pum9rp27IHkQ7RZOpaao8MvnW0rJ/tl5b2J/PE+YWe0Adp\nSpqTBKeagPhzYbtsmQoAcWMH5FitvYS+9CeDRku7hwSRbQEU4bRg\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.026584", "end": "2024-12-21 11:33:03.755189", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAOqUU4Suxk6WsZkaYLgdc8AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Vh\nOTQ1Mzg0LWFlYzY0ZTk2LWIxOTkxYTYwLWI4MWQ3M2JmMB4XDTI0MTIyMTE2MzMw\nMVoXDTI1MTIyMTE2MzMwMVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxh8nt/0HsnATVBnkdq6MzTJnIi+c/I2J\nLmBheseNWKWHaO/T0o0I/h3JkFopit33wXt9MVCuUxAkicAh+72baY9SM15bes1i\n0KB8trqkjd4Tri/VEGnowg7LnDJ/yHpMoJYzQI1qDA0h8D1TUlwk1AWoomxU2a4E\n0HEvnPaVCOoCsM0fsD2phjHhLmvgFW1QzWpKC2ztUUyfiPbXX94o4SHetqRfES6r\nOPL3tot2xXk6NqpTVs+05eWh7r9OTk7wlpHfwalG6UT+zB9cJ0/XpYgdCxuRMrDX\n15qurtatZDzeYaoYbXGQbp0mXHWXsMRqVZOBhb9j2BvH7JJKTBjgXwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFKSM\noWNevZ67KdsDzXFCIUcdRXZhMB8GA1UdIwQYMBaAFIUc0FBwGdptb4N0WkNIBrji\nQfP3MA0GCSqGSIb3DQEBCwUAA4IBAQB6i8AXEaRPZTZIqF/Lk0R/dHF1jn6wGV2n\nNUiG+QV1gje+5UmJn+ksa5/+1nkBNrPXGa5bs9jO5/MKtdmpDK8JctugTagc/NBJ\n/w9+4EgiIYFFszoDgd+igweiIQDHH8MWGApOVNrDcEXOas+v5RPWuzrdgGWXhlo+\nxLoYko7ink1vOQ4EmafaXxrG0L0qdKy8fF1UZkZUYgfUbLdOWCYKwOZDM5r8UbwS\n1Xvs8bUhnnOjQxNJU4pum9rp27IHkQ7RZOpaao8MvnW0rJ/tl5b2J/PE+YWe0Adp\nSpqTBKeagPhzYbtsmQoAcWMH5FitvYS+9CeDRku7hwSRbQEU4bRg\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAOqUU4Suxk6WsZkaYLgdc8AwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Vh\nOTQ1Mzg0LWFlYzY0ZTk2LWIxOTkxYTYwLWI4MWQ3M2JmMB4XDTI0MTIyMTE2MzMw\nMVoXDTI1MTIyMTE2MzMwMVowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxh8nt/0HsnATVBnkdq6MzTJnIi+c/I2J\nLmBheseNWKWHaO/T0o0I/h3JkFopit33wXt9MVCuUxAkicAh+72baY9SM15bes1i\n0KB8trqkjd4Tri/VEGnowg7LnDJ/yHpMoJYzQI1qDA0h8D1TUlwk1AWoomxU2a4E\n0HEvnPaVCOoCsM0fsD2phjHhLmvgFW1QzWpKC2ztUUyfiPbXX94o4SHetqRfES6r\nOPL3tot2xXk6NqpTVs+05eWh7r9OTk7wlpHfwalG6UT+zB9cJ0/XpYgdCxuRMrDX\n15qurtatZDzeYaoYbXGQbp0mXHWXsMRqVZOBhb9j2BvH7JJKTBjgXwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFKSM\noWNevZ67KdsDzXFCIUcdRXZhMB8GA1UdIwQYMBaAFIUc0FBwGdptb4N0WkNIBrji\nQfP3MA0GCSqGSIb3DQEBCwUAA4IBAQB6i8AXEaRPZTZIqF/Lk0R/dHF1jn6wGV2n\nNUiG+QV1gje+5UmJn+ksa5/+1nkBNrPXGa5bs9jO5/MKtdmpDK8JctugTagc/NBJ\n/w9+4EgiIYFFszoDgd+igweiIQDHH8MWGApOVNrDcEXOas+v5RPWuzrdgGWXhlo+\nxLoYko7ink1vOQ4EmafaXxrG0L0qdKy8fF1UZkZUYgfUbLdOWCYKwOZDM5r8UbwS\n1Xvs8bUhnnOjQxNJU4pum9rp27IHkQ7RZOpaao8MvnW0rJ/tl5b2J/PE+YWe0Adp\nSpqTBKeagPhzYbtsmQoAcWMH5FitvYS+9CeDRku7hwSRbQEU4bRg\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDGHye3/QeycBNU\nGeR2rozNMmciL5z8jYkuYGF6x41YpYdo79PSjQj+HcmQWimK3ffBe30xUK5TECSJ\nwCH7vZtpj1IzXlt6zWLQoHy2uqSN3hOuL9UQaejCDsucMn/IekygljNAjWoMDSHw\nPVNSXCTUBaiibFTZrgTQcS+c9pUI6gKwzR+wPamGMeEua+AVbVDNakoLbO1RTJ+I\n9tdf3ijhId62pF8RLqs48ve2i3bFeTo2qlNWz7Tl5aHuv05OTvCWkd/BqUbpRP7M\nH1wnT9eliB0LG5EysNfXmq6u1q1kPN5hqhhtcZBunSZcdZewxGpVk4GFv2PYG8fs\nkkpMGOBfAgMBAAECggEAB5H6L7a8kWNklwKURGVVXk6Dj/6swMkHCbLHbBcYPgpj\n0FfVoAZ2kQCCCqt/Sd3tu8K5smWIUQaIDDa/YFdK4G+IWbZ3HWFKr3kVEDBf3ZyI\n46TSlIQkz4FxjeraD3F+Nbv3gmBvlxKguNrHBm8ckyNnjFPtBrz2VHahYJ3awlKJ\nbrqnN5vZceABxPx99gjjv/mWYav9s0S1IYthj3BNfwk31t503KYiw6dR7xIxo1I/\npjq/AlkRDvljlfO3z0uxkOKq6fbPvoKdLLVvePWXlhFyB/V7C8SmJBQ8dPtegKUI\nEOHMo/YAmEhJ7m4rwrvFwbMRmQdJYa9CHkEUMvgqBQKBgQD7GKHqdds9Fy7iO+ua\n7wmtJTCczhBsdarldsPkaZYwmZ8RpQy3X7BLN0flOI0M60hgTGzQ8kKh+YQlhG2C\nI6tatZccVmuInbCmVMc6YOrGTwBiJ38WaOC9ul3agD2cPZEAJQE9aS5ljMXv2/hv\nLDd9501BuvOtDn/Cy7B01WdJKwKBgQDJ/ayNCJeCY9tILa6rs1hUstb4vmwW4zBq\nZDP17ZatH3uxGZquaaS8rqHUG/FKKNnftdL+6nbQgQctUqgJGBEjfirzeGw8jMyn\nurPL3Jso2foqe4X0XXJVKZ61w3HqJigfrDkQrXjMW90WFvo71Pe8y2xmzMRYe9DB\nsJASa/CDnQKBgEV8U5Jmev005SyGGHUPoUXg/Mrn23OoPFqzuxIChiV83aAsZJB6\nUnb+HU85jw78LVM40wQfhoD8Y61eME/LPygjCGY3JBrySGg/HcLx/wZR5nFDLNAu\nxcK7SC2nP2V1RCMeGnG8IbLIeh4eaWmaeuDEmYSPD8/cK07kvzgVtjdxAoGBALSM\nfzQBPgxa3aMC0mK/iHEM+oKVE2DR6nDu6FMm78wHnDSNQmosHAhbwGMsJt227ii9\nCcC6K0HsU1H71B9s18T1/cZrGbR11mkGxbevlTFKxgY7K11mOdBNx4OJE8XMHYlS\nFCAILzXb1x7rovjgQCENY/PvjIHbCppXDDCrMu0xAoGBAIk345kGUbHmpJEw6uYO\nkWcZFY58EcgqPywlAlYy0I66PeasM3w2sxWcgYRcFUmig8hNmS0oaYinYwlg3UK3\nWcTRwYnVAC/o868mZ0sewAbQuK+/h6IYc95U2zO6umjjvmOpeocWA5FtsAz068g4\n5vjtBHzGZIZGPcv1AeQD1+m6\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2024-12-21 11:33:03.728605" } STDOUT: Request "20241221163301" removed. 
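At this point the certificate, key, and CA contents have already been captured in the certificate_test_certs fact, so the role stops certmonger tracking (the getcert command shown in the result above) and, in the next task, removes the files from disk. A rough standalone equivalent of that cleanup, assuming the same default paths as in this run:

- name: Stop tracking the test certificate
  ansible.builtin.command:
    cmd: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt
  changed_when: true

- name: Remove the generated certificate and key
  ansible.builtin.file:
    path: "{{ item }}"
    state: absent
  loop:
    - /etc/pki/tls/certs/quadlet_demo.crt
    - /etc/pki/tls/private/quadlet_demo.key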
TASK [fedora.linux_system_roles.certificate : Remove files] ******************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 Saturday 21 December 2024 11:33:03 -0500 (0:00:00.574) 0:00:12.934 ***** changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" } ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } TASK [Run the role] ************************************************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62 Saturday 21 December 2024 11:33:04 -0500 (0:00:01.117) 0:00:14.052 ***** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.081) 0:00:14.133 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.024) 0:00:14.157 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.034) 0:00:14.192 ***** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.360) 0:00:14.552 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.023) 0:00:14.575 ***** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.351) 0:00:14.927 ***** ok: [managed-node2] => { "ansible_facts": { 
"__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.026) 0:00:14.954 ***** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 21 December 2024 11:33:05 -0500 (0:00:00.042) 0:00:14.997 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 21 December 2024 11:33:06 -0500 (0:00:01.021) 0:00:16.019 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 21 December 2024 11:33:06 -0500 (0:00:00.073) 0:00:16.092 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 21 December 2024 11:33:07 -0500 (0:00:00.077) 0:00:16.170 ***** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 21 December 2024 11:33:07 -0500 (0:00:00.106) 0:00:16.277 ***** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 21 December 2024 11:33:07 -0500 (0:00:00.074) 0:00:16.352 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 21 December 2024 11:33:07 -0500 (0:00:00.073) 0:00:16.425 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026889", "end": "2024-12-21 11:33:07.702720", "rc": 0, "start": "2024-12-21 11:33:07.675831" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 21 December 2024 11:33:07 -0500 (0:00:00.477) 0:00:16.903 ***** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 21 December 2024 11:33:07 -0500 (0:00:00.066) 0:00:16.969 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 21 December 2024 11:33:07 -0500 (0:00:00.053) 0:00:17.022 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 21 December 2024 11:33:08 -0500 (0:00:00.163) 0:00:17.186 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 21 December 2024 11:33:08 -0500 (0:00:00.216) 0:00:17.402 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 21 December 2024 11:33:08 -0500 (0:00:00.054) 0:00:17.457 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 21 December 2024 11:33:08 -0500 (0:00:00.045) 0:00:17.502 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:33:08 -0500 (0:00:00.056) 0:00:17.559 ***** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.551) 0:00:18.111 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.084) 0:00:18.196 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.068) 0:00:18.264 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.485) 0:00:18.749 ***** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.063) 0:00:18.813 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.043) 0:00:18.857 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.076) 0:00:18.933 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.076) 0:00:19.010 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:33:09 -0500 (0:00:00.071) 0:00:19.081 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.050) 0:00:19.132 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.050) 0:00:19.183 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.050) 0:00:19.233 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": 
"/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.076) 0:00:19.310 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.062) 0:00:19.373 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.080) 0:00:19.454 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.032) 0:00:19.486 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.074) 0:00:19.561 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.048) 0:00:19.609 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.044) 0:00:19.653 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.097) 0:00:19.751 ***** skipping: 
[managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.040) 0:00:19.792 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.036) 0:00:19.828 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.064) 0:00:19.893 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.033) 0:00:19.927 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.029) 0:00:19.956 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.029) 0:00:19.986 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 21 December 2024 11:33:10 -0500 (0:00:00.030) 0:00:20.016 ***** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 21 December 2024 11:33:11 -0500 (0:00:00.124) 0:00:20.140 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 21 December 2024 11:33:11 -0500 (0:00:00.057) 0:00:20.198 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 21 December 2024 11:33:11 -0500 (0:00:00.074) 0:00:20.273 ***** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 21 December 2024 11:33:11 -0500 (0:00:00.389) 0:00:20.662 ***** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 21 December 2024 11:33:11 -0500 (0:00:00.057) 0:00:20.720 ***** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 21 December 2024 11:33:12 -0500 (0:00:00.445) 0:00:21.165 ***** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 21 December 2024 11:33:12 -0500 (0:00:00.057) 0:00:21.223 ***** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 21 December 2024 11:33:12 -0500 (0:00:00.859) 0:00:22.083 ***** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 21 December 2024 11:33:13 -0500 (0:00:00.071) 0:00:22.154 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 21 December 2024 11:33:13 -0500 (0:00:00.063) 0:00:22.217 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 21 December 2024 11:33:13 -0500 (0:00:00.060) 0:00:22.278 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 21 December 2024 11:33:13 -0500 (0:00:00.058) 0:00:22.336 ***** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 21 December 2024 11:33:13 -0500 (0:00:00.067) 0:00:22.404 ***** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:29:35 EST", "ActiveEnterTimestampMonotonic": "336278957", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket basic.target polkit.service system.slice sysinit.target dbus-broker.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:29:34 EST", "AssertTimestampMonotonic": "336011047", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "552135000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:29:34 EST", "ConditionTimestampMonotonic": "336011044", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"ipset.service shutdown.target ip6tables.service iptables.service ebtables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4599", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainHandoffTimestampMonotonic": "336045495", "ExecMainPID": "10892", "ExecMainStartTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainStartTimestampMonotonic": "336014579", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:29:34 EST", "InactiveExitTimestampMonotonic": "336015307", "InvocationID": "99d07a1d51fa44d09a9b8ad11c89f152", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": 
"524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10892", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3151024128", "MemoryCurrent": "33697792", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34119680", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", 
"StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:33:00 EST", "StateChangeTimestampMonotonic": "542126061", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 21 December 2024 11:33:13 -0500 (0:00:00.586) 0:00:22.990 ***** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:29:35 EST", "ActiveEnterTimestampMonotonic": "336278957", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket basic.target polkit.service system.slice sysinit.target dbus-broker.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:29:34 EST", "AssertTimestampMonotonic": "336011047", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "552135000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:29:34 EST", "ConditionTimestampMonotonic": "336011044", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service shutdown.target ip6tables.service 
iptables.service ebtables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4599", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainHandoffTimestampMonotonic": "336045495", "ExecMainPID": "10892", "ExecMainStartTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainStartTimestampMonotonic": "336014579", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:29:34 EST", "InactiveExitTimestampMonotonic": "336015307", "InvocationID": "99d07a1d51fa44d09a9b8ad11c89f152", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": 
"13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10892", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3147964416", "MemoryCurrent": "33697792", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34119680", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": 
"infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:33:00 EST", "StateChangeTimestampMonotonic": "542126061", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 21 December 2024 11:33:14 -0500 (0:00:00.559) 0:00:23.550 ***** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 21 December 2024 11:33:14 -0500 (0:00:00.048) 0:00:23.598 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 21 December 2024 11:33:14 -0500 (0:00:00.054) 0:00:23.653 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 21 December 2024 11:33:14 -0500 (0:00:00.055) 0:00:23.708 ***** changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 21 December 2024 11:33:15 -0500 (0:00:01.218) 0:00:24.926 ***** skipping: [managed-node2] => (item={'port': 
'8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 21 December 2024 11:33:15 -0500 (0:00:00.090) 0:00:25.017 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 21 December 2024 11:33:15 -0500 (0:00:00.070) 0:00:25.088 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.049) 0:00:25.138 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.050) 0:00:25.188 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.045) 0:00:25.233 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.099) 0:00:25.332 ***** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.094) 0:00:25.427 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** 
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.060) 0:00:25.488 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.051) 0:00:25.540 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.056) 0:00:25.596 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.037) 0:00:25.633 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.159) 0:00:25.792 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.044) 0:00:25.837 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.049) 0:00:25.886 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.046) 0:00:25.933 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to 
cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.033) 0:00:25.966 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.042) 0:00:26.009 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 21 December 2024 11:33:16 -0500 (0:00:00.066) 0:00:26.075 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.052) 0:00:26.127 ***** [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.634) 0:00:26.762 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.039) 0:00:26.801 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.037) 0:00:26.838 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.103) 0:00:26.942 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.029) 0:00:26.971 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.029) 0:00:27.000 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.035) 0:00:27.036 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 21 December 2024 11:33:17 -0500 (0:00:00.039) 0:00:27.076 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 21 December 2024 11:33:18 -0500 (0:00:00.611) 0:00:27.687 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 21 December 2024 11:33:18 -0500 (0:00:00.083) 0:00:27.771 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 21 December 2024 11:33:18 -0500 (0:00:00.083) 0:00:27.854 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:18 -0500 (0:00:00.094) 0:00:27.949 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:18 -0500 (0:00:00.074) 0:00:28.024 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:18 -0500 (0:00:00.047) 0:00:28.072 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.049) 0:00:28.122 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.045) 0:00:28.167 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.533) 0:00:28.700 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.026) 0:00:28.727 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.141) 0:00:28.868 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", 
"__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.070) 0:00:28.939 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.041) 0:00:28.980 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:19 -0500 (0:00:00.091) 0:00:29.072 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.052) 0:00:29.125 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.055) 0:00:29.181 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.033) 0:00:29.215 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.034) 0:00:29.249 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.044) 0:00:29.293 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 
4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.401) 0:00:29.695 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.048) 0:00:29.743 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.046) 0:00:29.790 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.048) 0:00:29.838 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.053) 0:00:29.892 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.070) 0:00:29.963 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.048) 0:00:30.011 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:33:20 -0500 (0:00:00.085) 0:00:30.097 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.111) 0:00:30.209 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.158) 0:00:30.367 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.083) 0:00:30.450 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.090) 0:00:30.541 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.166) 0:00:30.708 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.069) 0:00:30.778 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.047) 0:00:30.825 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.163) 0:00:30.988 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:21 -0500 (0:00:00.114) 0:00:31.103 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:22 -0500 (0:00:00.039) 0:00:31.143 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:22 -0500 (0:00:00.039) 0:00:31.182 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 21 December 2024 11:33:22 -0500 (0:00:00.057) 0:00:31.240 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 21 December 2024 11:33:22 -0500 (0:00:00.044) 0:00:31.284 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 21 December 2024 11:33:22 -0500 (0:00:00.029) 0:00:31.313 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:22 -0500 (0:00:00.389) 0:00:31.703 ***** changed: [managed-node2] => { "changed": true, "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "dest": "/etc/containers/systemd/quadlet-demo.network", "gid": 0, "group": "root", "md5sum": "061f3cf318cbd8ab5794bb1173831fb8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 74, "src": "/root/.ansible/tmp/ansible-tmp-1734798802.6458833-18916-273275758786371/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 21 December 2024 11:33:23 -0500 (0:00:00.941) 0:00:32.645 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 21 December 2024 11:33:23 -0500 (0:00:00.056) 0:00:32.701 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 21 December 2024 11:33:23 -0500 (0:00:00.049) 0:00:32.751 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 21 December 2024 11:33:24 -0500 (0:00:00.774) 0:00:33.525 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket system.slice -.mount network-online.target sysinit.target basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod 
cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": 
"13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3125829632", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", 
"StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.611) 0:00:34.136 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.049) 0:00:34.185 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.065) 0:00:34.251 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.059) 0:00:34.311 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.039) 0:00:34.351 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.091) 0:00:34.442 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.138) 0:00:34.580 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.039) 0:00:34.620 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.040) 0:00:34.660 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:33:25 -0500 (0:00:00.058) 0:00:34.719 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.446) 0:00:35.165 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.051) 0:00:35.217 ***** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.048) 0:00:35.266 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.086) 0:00:35.352 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.075) 0:00:35.428 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.059) 0:00:35.487 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.096) 0:00:35.583 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.054) 0:00:35.637 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.051) 0:00:35.689 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 
December 2024 11:33:26 -0500 (0:00:00.086) 0:00:35.775 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.056) 0:00:35.831 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.053) 0:00:35.885 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.120) 0:00:36.006 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:33:26 -0500 (0:00:00.068) 0:00:36.074 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.072) 0:00:36.147 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.111) 0:00:36.258 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.107) 0:00:36.366 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.034) 0:00:36.401 ***** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.033) 0:00:36.434 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.031) 0:00:36.465 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.033) 0:00:36.499 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.046) 0:00:36.545 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 34, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:27 -0500 (0:00:00.434) 0:00:36.980 ***** changed: [managed-node2] => { "changed": true, "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "dest": "/etc/containers/systemd/quadlet-demo-mysql.volume", "gid": 0, "group": "root", "md5sum": "5ddd03a022aeb4502d9bc8ce436b4233", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 9, "src": "/root/.ansible/tmp/ansible-tmp-1734798807.9232495-19193-85517954084435/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 21 December 2024 11:33:28 -0500 (0:00:00.838) 0:00:37.819 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 21 December 2024 11:33:28 -0500 (0:00:00.076) 0:00:37.895 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 21 December 2024 11:33:28 -0500 (0:00:00.064) 0:00:37.960 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 21 December 2024 11:33:29 -0500 (0:00:00.794) 0:00:38.754 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target sysinit.target -.mount system.slice systemd-journald.socket basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3122974720", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", 
"PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.639) 0:00:39.394 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.035) 0:00:39.430 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.100) 0:00:39.530 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.049) 0:00:39.580 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.052) 0:00:39.632 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.071) 0:00:39.703 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.152) 0:00:39.856 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.069) 0:00:39.925 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:33:30 -0500 (0:00:00.059) 0:00:39.985 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.153) 0:00:40.139 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.409) 0:00:40.548 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.032) 0:00:40.580 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.034) 0:00:40.615 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.034) 0:00:40.649 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.035) 0:00:40.684 ***** skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.031) 0:00:40.716 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.038) 0:00:40.754 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.047) 0:00:40.802 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.050) 0:00:40.852 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.080) 0:00:40.933 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.048) 0:00:40.981 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:33:31 -0500 (0:00:00.056) 0:00:41.037 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.100) 0:00:41.137 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.046) 0:00:41.184 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.034) 0:00:41.219 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.071) 0:00:41.291 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.051) 0:00:41.342 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.029) 0:00:41.371 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.069) 0:00:41.440 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.038) 0:00:41.479 ***** changed: [managed-node2] => (item=/tmp/quadlet_demo) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/quadlet_demo", "mode": "0777", "owner": "root", "path": "/tmp/quadlet_demo", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task 
path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 21 December 2024 11:33:32 -0500 (0:00:00.401) 0:00:41.880 ***** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 21 December 2024 11:33:39 -0500 (0:00:06.390) 0:00:48.271 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:39 -0500 (0:00:00.394) 0:00:48.666 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 21 December 2024 11:33:39 -0500 (0:00:00.030) 0:00:48.696 ***** changed: [managed-node2] => { "changed": true, "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "dest": "/etc/containers/systemd/quadlet-demo-mysql.container", "gid": 0, "group": "root", "md5sum": "341b473056d2a5dfa35970b0d2e23a5d", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 363, "src": "/root/.ansible/tmp/ansible-tmp-1734798819.634735-19706-34961030233952/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 21 December 2024 11:33:40 -0500 (0:00:00.699) 0:00:49.396 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 21 December 2024 11:33:40 -0500 (0:00:00.029) 0:00:49.425 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 21 December 2024 11:33:41 -0500 (0:00:00.784) 0:00:50.210 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", 
"ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "tmp.mount quadlet-demo-mysql-volume.service sysinit.target basic.target -.mount systemd-journald.socket quadlet-demo-network.service system.slice network-online.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i 
--cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2977521664", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": 
"infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql-volume.service quadlet-demo-network.service -.mount system.slice sysinit.target", "RequiresMountsFor": "/tmp/quadlet_demo /run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 
30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.912) 0:00:51.122 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.032) 0:00:51.154 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.042) 0:00:51.196 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.042) 0:00:51.239 
***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.050) 0:00:51.290 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.090) 0:00:51.380 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.070) 0:00:51.451 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.039) 0:00:51.490 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.040) 0:00:51.531 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.043) 0:00:51.575 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] 
*** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.456) 0:00:52.031 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:33:42 -0500 (0:00:00.073) 0:00:52.104 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.030) 0:00:52.135 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.031) 0:00:52.166 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.029) 0:00:52.195 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.031) 0:00:52.226 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.030) 0:00:52.256 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.034) 0:00:52.291 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.048) 0:00:52.340 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.095) 0:00:52.436 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.037) 0:00:52.473 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.040) 0:00:52.514 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.081) 0:00:52.595 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.039) 0:00:52.635 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.029) 0:00:52.664 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.064) 0:00:52.729 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] 
************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.050) 0:00:52.780 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.030) 0:00:52.810 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.033) 0:00:52.843 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.049) 0:00:52.893 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.039) 0:00:52.932 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 21 December 2024 11:33:43 -0500 (0:00:00.080) 0:00:53.013 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 103, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:44 -0500 (0:00:00.459) 0:00:53.472 ***** changed: [managed-node2] => { "changed": true, "checksum": "d681c7d56f912150d041873e880818b22a90c188", "dest": "/etc/containers/systemd/envoy-proxy-configmap.yml", "gid": 0, "group": "root", "md5sum": "aec75d972c231aac004e1338934544cf", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2102, "src": "/root/.ansible/tmp/ansible-tmp-1734798824.4087186-19912-106128108103025/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 21 December 2024 11:33:45 -0500 (0:00:00.698) 0:00:54.170 ***** 
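For reference, the envoy-proxy-configmap.yml deployed in the preceding task is the ConfigMap carried in __podman_quadlet_str earlier in this run. Rendered as plain YAML it reads roughly as follows; this is a reconstruction from the escaped string above, with indentation restored (the captured log collapses whitespace) and the Envoy filter, TLS, and endpoint details trimmed to comments:

  ---
  apiVersion: v1
  kind: ConfigMap
  metadata:
    name: envoy-proxy-config
  data:
    envoy.yaml: |
      admin:
        address:
          socket_address:
            address: 0.0.0.0
            port_value: 9901
      static_resources:
        listeners:
        - name: listener_0
          address:
            socket_address:
              address: 0.0.0.0
              port_value: 8080
          filter_chains:
          - filters:
            - name: envoy.filters.network.http_connection_manager
              # HttpConnectionManager config trimmed here: routes every
              # request (prefix "/") on the local_service virtual host
              # to the "backend" cluster
            transport_socket:
              name: envoy.transport_sockets.tls
              # DownstreamTlsContext trimmed here: serves
              # /etc/envoy-certificates/certificate.pem with private key
              # /etc/envoy-certificates/certificate.key
        clusters:
        - name: backend
          connect_timeout: 5s
          type: STATIC
          dns_refresh_rate: 1800s
          lb_policy: ROUND_ROBIN
          # load_assignment trimmed here: a single static endpoint at
          # 127.0.0.1:80

The listener port 8080 and admin port 9901 shown here are the container ports that the quadlet-demo.kube unit later in this run publishes on the host as 8000 and 9000 respectively.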
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 21 December 2024 11:33:45 -0500 (0:00:00.031) 0:00:54.201 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 21 December 2024 11:33:45 -0500 (0:00:00.030) 0:00:54.231 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 21 December 2024 11:33:45 -0500 (0:00:00.769) 0:00:55.001 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 21 December 2024 11:33:45 -0500 (0:00:00.034) 0:00:55.035 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:33:45 -0500 (0:00:00.033) 0:00:55.069 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing 
hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.080) 0:00:55.150 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.038) 0:00:55.188 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.033) 0:00:55.222 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.045) 0:00:55.267 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.056) 0:00:55.324 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.033) 0:00:55.357 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.032) 0:00:55.390 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.041) 
0:00:55.431 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.401) 0:00:55.833 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.050) 0:00:55.883 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.047) 0:00:55.931 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.051) 0:00:55.983 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:33:46 -0500 (0:00:00.056) 0:00:56.039 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.112) 0:00:56.152 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.040) 0:00:56.192 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.035) 0:00:56.228 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.034) 0:00:56.263 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.053) 0:00:56.316 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.033) 0:00:56.350 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.036) 0:00:56.386 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.086) 0:00:56.473 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.066) 0:00:56.539 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.049) 0:00:56.588 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.101) 0:00:56.690 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.073) 0:00:56.763 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.035) 0:00:56.799 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.035) 0:00:56.835 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.032) 0:00:56.867 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.031) 0:00:56.899 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 21 December 2024 11:33:47 -0500 (0:00:00.029) 0:00:56.928 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 136, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:48 -0500 (0:00:00.531) 0:00:57.459 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 21 December 2024 11:33:48 -0500 (0:00:00.032) 0:00:57.491 ***** changed: [managed-node2] => { "changed": true, "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "dest": "/etc/containers/systemd/quadlet-demo.yml", "gid": 0, "group": "root", "md5sum": "fd890594adfc24339cb9cdc5e7b19a66", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1605, "src": "/root/.ansible/tmp/ansible-tmp-1734798828.4374363-20045-16704362976477/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 21 December 2024 11:33:49 -0500 (0:00:01.101) 0:00:58.593 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 21 December 2024 11:33:49 -0500 (0:00:00.107) 0:00:58.701 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.799) 0:00:59.500 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.034) 0:00:59.535 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.034) 0:00:59.569 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin 
port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.045) 0:00:59.614 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.037) 0:00:59.652 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.034) 0:00:59.686 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.059) 0:00:59.746 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.057) 0:00:59.804 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.033) 0:00:59.837 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.034) 0:00:59.871 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:33:50 -0500 (0:00:00.041) 0:00:59.913 ***** ok: 
[managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.377) 0:01:00.290 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.029) 0:01:00.320 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.029) 0:01:00.349 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.028) 0:01:00.378 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.029) 0:01:00.407 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.027) 0:01:00.435 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in 
subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.030) 0:01:00.466 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.029) 0:01:00.495 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.067) 0:01:00.563 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.052) 0:01:00.616 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:33:51 -0500 (0:00:00.034) 0:01:00.650 ***** ok: [managed-node2] => { "changed": false, "content": 
"LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK", "encoding": "base64", "source": "/etc/containers/systemd/quadlet-demo.yml" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.498) 0:01:01.149 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/wordpress:4.8-apache", "quay.io/linux-system-roles/envoyproxy:v1.25.0" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [ "/tmp/httpd3", "/tmp/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.088) 0:01:01.237 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.037) 0:01:01.275 ***** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.030) 0:01:01.306 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.066) 0:01:01.372 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.059) 0:01:01.432 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.035) 0:01:01.468 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.038) 0:01:01.506 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 21 December 2024 11:33:52 -0500 (0:00:00.034) 0:01:01.541 ***** changed: [managed-node2] => (item=/tmp/httpd3) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 21 December 2024 11:33:53 -0500 (0:00:00.753) 0:01:02.295 ***** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 21 December 2024 11:34:10 -0500 (0:00:17.303) 0:01:19.598 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 160, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 21 December 2024 11:34:10 -0500 (0:00:00.393) 0:01:19.991 ***** changed: [managed-node2] => { "changed": true, "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "dest": "/etc/containers/systemd/quadlet-demo.kube", "gid": 0, "group": "root", "md5sum": "da53c88f92b68b0487aa209f795b6bb3", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 456, "src": "/root/.ansible/tmp/ansible-tmp-1734798850.9322016-20524-56637568013428/.source.kube", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 21 December 2024 11:34:11 -0500 (0:00:00.710) 0:01:20.701 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 21 December 2024 11:34:11 -0500 (0:00:00.035) 0:01:20.737 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 21 December 2024 11:34:11 -0500 (0:00:00.046) 0:01:20.784 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 21 December 2024 11:34:12 -0500 (0:00:00.829) 0:01:21.613 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target basic.target quadlet-demo-mysql.service network-online.target -.mount systemd-journald.socket 
system.slice quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", 
"FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2476826624", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", 
"ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service quadlet-demo-network.service sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 21 December 2024 11:34:13 -0500 (0:00:01.191) 0:01:22.804 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 21 December 2024 11:34:13 -0500 (0:00:00.084) 0:01:22.889 ***** skipping: [managed-node2] => { "changed": false, 
"skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 21 December 2024 11:34:13 -0500 (0:00:00.033) 0:01:22.923 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 21 December 2024 11:34:13 -0500 (0:00:00.030) 0:01:22.954 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check quadlet files] ***************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96 Saturday 21 December 2024 11:34:13 -0500 (0:00:00.041) 0:01:22.996 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/containers/systemd" ], "delta": "0:00:00.004321", "end": "2024-12-21 11:34:14.255170", "rc": 0, "start": "2024-12-21 11:34:14.250849" } STDOUT: total 24 drwxr-xr-x. 9 root root 178 Dec 21 11:29 ../ -rw-r--r--. 1 root root 74 Dec 21 11:33 quadlet-demo.network -rw-r--r--. 1 root root 9 Dec 21 11:33 quadlet-demo-mysql.volume -rw-r--r--. 1 root root 363 Dec 21 11:33 quadlet-demo-mysql.container -rw-r--r--. 1 root root 2102 Dec 21 11:33 envoy-proxy-configmap.yml -rw-r--r--. 1 root root 1605 Dec 21 11:33 quadlet-demo.yml -rw-r--r--. 1 root root 456 Dec 21 11:34 quadlet-demo.kube drwxr-xr-x. 
2 root root 185 Dec 21 11:34 ./ TASK [Check containers] ******************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100 Saturday 21 December 2024 11:34:14 -0500 (0:00:00.446) 0:01:23.443 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.051667", "end": "2024-12-21 11:34:14.763122", "failed_when_result": false, "rc": 0, "start": "2024-12-21 11:34:14.711455" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute ce41ff3061ea-service 6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra fb676e5504a3 quay.io/libpod/testimage:20210610 About a minute ago Up About a minute 0.0.0.0:15002->80/tcp httpd2-httpd2 61716cd30289 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute a8db008d7cc8-service 2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra a6061cf8dd2a quay.io/libpod/testimage:20210610 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp httpd3-httpd3 203d365a6d08 quay.io/linux-system-roles/mysql:5.6 mysqld 33 seconds ago Up 33 seconds (healthy) 3306/tcp quadlet-demo-mysql 9f1e833e8645 localhost/podman-pause:5.3.1-1733097600 1 second ago Up 1 second a96f3a51b8d1-service a0a9c1950f0b localhost/podman-pause:5.3.1-1733097600 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp a20a2c426a00-infra ab1ed7027307 quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress d62af06fe80a quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 
1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy TASK [Check volumes] *********************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105 Saturday 21 December 2024 11:34:14 -0500 (0:00:00.509) 0:01:23.953 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls" ], "delta": "0:00:00.029784", "end": "2024-12-21 11:34:15.203384", "failed_when_result": false, "rc": 0, "start": "2024-12-21 11:34:15.173600" } STDOUT: DRIVER VOLUME NAME local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [Check pods] ************************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110 Saturday 21 December 2024 11:34:15 -0500 (0:00:00.428) 0:01:24.381 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.041631", "end": "2024-12-21 11:34:15.699969", "failed_when_result": false, "rc": 0, "start": "2024-12-21 11:34:15.658338" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS a20a2c426a00 quadlet-demo Running 2 seconds ago a0a9c1950f0b a0a9c1950f0b,ab1ed7027307,d62af06fe80a a20a2c426a00-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running 26cc0fa7c809 httpd3 Running About a minute ago 2907e4388cf2 2907e4388cf2,a6061cf8dd2a 26cc0fa7c809-infra,httpd3-httpd3 running,running d2f6641bb2ef httpd2 Running About a minute ago 6232f80e7e24 6232f80e7e24,fb676e5504a3 d2f6641bb2ef-infra,httpd2-httpd2 running,running TASK [Check systemd] *********************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115 Saturday 21 December 2024 11:34:15 -0500 (0:00:00.514) 0:01:24.896 ***** ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units | grep quadlet", "delta": "0:00:00.014744", "end": "2024-12-21 11:34:16.168979", "failed_when_result": false, "rc": 0, "start": "2024-12-21 11:34:16.154235" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service quadlet-demo.service loaded active running quadlet-demo.service TASK [Check web] *************************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121 Saturday 21 December 2024 11:34:16 -0500 (0:00:00.448) 0:01:25.345 ***** FAILED - RETRYING: [managed-node2]: Check web (6 retries left). FAILED - RETRYING: [managed-node2]: Check web (5 retries left). FAILED - RETRYING: [managed-node2]: Check web (4 retries left). FAILED - RETRYING: [managed-node2]: Check web (3 retries left). FAILED - RETRYING: [managed-node2]: Check web (2 retries left). FAILED - RETRYING: [managed-node2]: Check web (1 retries left). fatal: [managed-node2]: FAILED! 
=> { "attempts": 6, "changed": false, "dest": "/run/out", "elapsed": 0, "url": "https://localhost:8000" } MSG: Request failed: TASK [Dump journal] ************************************************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Saturday 21 December 2024 11:34:49 -0500 (0:00:33.438) 0:01:58.783 ***** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030687", "end": "2024-12-21 11:34:50.028287", "failed_when_result": true, "rc": 0, "start": "2024-12-21 11:34:49.997600" } STDOUT: Dec 21 11:29:09 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Dec 21 11:29:09 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Dec 21 11:29:09 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Dec 21 11:29:09 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Dec 21 11:29:09 managed-node2 groupadd[8872]: group added to /etc/group: name=polkitd, GID=114 Dec 21 11:29:09 managed-node2 groupadd[8872]: group added to /etc/gshadow: name=polkitd Dec 21 11:29:09 managed-node2 groupadd[8872]: new group: name=polkitd, GID=114 Dec 21 11:29:09 managed-node2 useradd[8875]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Dec 21 11:29:09 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:29:09 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:29:09 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1254. 
Dec 21 11:29:26 managed-node2 systemd[1]: Started run-r9c185c1ecde342118898852eb84e9dea.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r9c185c1ecde342118898852eb84e9dea.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r9c185c1ecde342118898852eb84e9dea.service has finished successfully. ░░ ░░ The job identifier is 1332. Dec 21 11:29:26 managed-node2 systemctl[9567]: Warning: The unit file, source configuration file or drop-ins of man-db-cache-update.service changed on disk. Run 'systemctl daemon-reload' to reload units. Dec 21 11:29:26 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1410. Dec 21 11:29:26 managed-node2 systemd[1]: Reload requested from client PID 9570 ('systemctl') (unit session-5.scope)... Dec 21 11:29:26 managed-node2 systemd[1]: Reloading... Dec 21 11:29:26 managed-node2 systemd[1]: Reloading finished in 249 ms. Dec 21 11:29:26 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Dec 21 11:29:27 managed-node2 sudo[8747]: pam_unix(sudo:session): session closed for user root Dec 21 11:29:28 managed-node2 python3.12[9900]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:29:28 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Dec 21 11:29:28 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1410. Dec 21 11:29:28 managed-node2 systemd[1]: run-r9c185c1ecde342118898852eb84e9dea.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r9c185c1ecde342118898852eb84e9dea.service has successfully entered the 'dead' state. 
Dec 21 11:29:29 managed-node2 python3.12[10042]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 21 11:29:29 managed-node2 python3.12[10174]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:29:31 managed-node2 python3.12[10307]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:29:32 managed-node2 python3.12[10438]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:29:32 managed-node2 python3.12[10569]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:29:34 managed-node2 python3.12[10701]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:29:34 managed-node2 python3.12[10834]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:29:34 managed-node2 systemd[1]: Reload requested from client PID 10837 ('systemctl') (unit session-5.scope)... Dec 21 11:29:34 managed-node2 systemd[1]: Reloading... Dec 21 11:29:34 managed-node2 systemd[1]: Reloading finished in 188 ms. Dec 21 11:29:34 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1488. Dec 21 11:29:35 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1488. Dec 21 11:29:35 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Dec 21 11:29:35 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1572. Dec 21 11:29:35 managed-node2 polkitd[11031]: Started polkitd version 125 Dec 21 11:29:35 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. 
░░ ░░ The job identifier is 1572. Dec 21 11:29:35 managed-node2 python3.12[11067]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:29:37 managed-node2 python3.12[11207]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:29:37 managed-node2 rsyslogd[658]: imjournal: journal files changed, reloading... [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Dec 21 11:29:37 managed-node2 python3.12[11339]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:29:38 managed-node2 python3.12[11470]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:29:38 managed-node2 python3.12[11602]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:29:39 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:29:39 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:29:39 managed-node2 systemd[1]: Started run-rd506baa9bc654e8487947a38b13ae5f9.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-rd506baa9bc654e8487947a38b13ae5f9.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rd506baa9bc654e8487947a38b13ae5f9.service has finished successfully. ░░ ░░ The job identifier is 1653. Dec 21 11:29:39 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1731. Dec 21 11:29:39 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Dec 21 11:29:39 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1731. Dec 21 11:29:40 managed-node2 systemd[1]: run-rd506baa9bc654e8487947a38b13ae5f9.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rd506baa9bc654e8487947a38b13ae5f9.service has successfully entered the 'dead' state. Dec 21 11:29:40 managed-node2 python3.12[11744]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Dec 21 11:29:41 managed-node2 python3.12[11904]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Dec 21 11:29:42 managed-node2 kernel: SELinux: Converting 473 SID table entries... 
Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability open_perms=1 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability always_check_network=0 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Dec 21 11:29:42 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Dec 21 11:29:43 managed-node2 python3.12[12039]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Dec 21 11:29:47 managed-node2 python3.12[12170]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:29:49 managed-node2 python3.12[12303]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:29:49 managed-node2 python3.12[12434]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:29:50 managed-node2 python3.12[12565]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:29:50 managed-node2 python3.12[12670]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798590.1196222-8811-30159580220340/.source.yml _original_basename=.aqgkwxyo follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:29:51 managed-node2 python3.12[12801]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:29:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat532505600-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat532505600-merged.mount has successfully entered the 'dead' state. 
Dec 21 11:29:51 managed-node2 kernel: evm: overlay not supported Dec 21 11:29:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck191434635-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck191434635-merged.mount has successfully entered the 'dead' state. Dec 21 11:29:51 managed-node2 podman[12808]: 2024-12-21 11:29:51.419964587 -0500 EST m=+0.084309518 system refresh Dec 21 11:29:51 managed-node2 podman[12808]: 2024-12-21 11:29:51.709193583 -0500 EST m=+0.373538553 image build e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d Dec 21 11:29:51 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1811. Dec 21 11:29:51 managed-node2 systemd[1]: Created slice machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice - cgroup machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice. ░░ Subject: A start job for unit machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice has finished successfully. ░░ ░░ The job identifier is 1810. Dec 21 11:29:51 managed-node2 podman[12808]: 2024-12-21 11:29:51.764790109 -0500 EST m=+0.429134964 container create 2a78710994ed4ba47575e365ba77535c8e2641751f6fe675fe15b881b953afc7 (image=localhost/podman-pause:5.3.1-1733097600, name=d8bc925aaaaf-infra, pod_id=d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222, io.buildah.version=1.38.0) Dec 21 11:29:51 managed-node2 podman[12808]: 2024-12-21 11:29:51.770826062 -0500 EST m=+0.435170985 pod create d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222 (image=, name=nopull) Dec 21 11:29:54 managed-node2 podman[12808]: 2024-12-21 11:29:54.359704834 -0500 EST m=+3.024049708 container create f8b8aa0a43ea756c41241f4db78726f9877d7593f073592d986431bf59b6933a (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Dec 21 11:29:54 managed-node2 podman[12808]: 2024-12-21 11:29:54.335139553 -0500 EST m=+2.999484874 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:29:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Dec 21 11:29:57 managed-node2 python3.12[13140]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:29:58 managed-node2 python3.12[13278]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:30:00 managed-node2 python3.12[13411]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:01 managed-node2 python3.12[13543]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:30:02 managed-node2 python3.12[13676]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:30:02 managed-node2 python3.12[13809]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:30:04 managed-node2 python3.12[13940]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:05 managed-node2 python3.12[14072]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:06 managed-node2 python3.12[14204]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Dec 21 11:30:07 managed-node2 python3.12[14364]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Dec 21 11:30:08 managed-node2 python3.12[14495]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Dec 21 11:30:12 managed-node2 python3.12[14626]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:30:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:30:15 managed-node2 podman[14767]: 2024-12-21 11:30:15.864950757 -0500 EST m=+0.297338995 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Dec 21 11:30:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:30:16 managed-node2 python3.12[14904]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:30:16 managed-node2 python3.12[15035]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:30:17 managed-node2 python3.12[15166]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:30:17 managed-node2 python3.12[15271]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798617.0706573-10035-101263414885060/.source.yml _original_basename=.vzt_imjn follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:30:18 managed-node2 python3.12[15402]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:30:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice - cgroup machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice. ░░ Subject: A start job for unit machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice has finished successfully. ░░ ░░ The job identifier is 1816. Dec 21 11:30:18 managed-node2 podman[15410]: 2024-12-21 11:30:18.294532072 -0500 EST m=+0.064559515 container create 0fc4d258a402051284f5527e6b09250ac3414eb3c8d1b124f1f52efe50fc038e (image=localhost/podman-pause:5.3.1-1733097600, name=4570ef15e010-infra, pod_id=4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a, io.buildah.version=1.38.0) Dec 21 11:30:18 managed-node2 podman[15410]: 2024-12-21 11:30:18.300736287 -0500 EST m=+0.070763722 pod create 4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a (image=, name=bogus) Dec 21 11:30:18 managed-node2 podman[15410]: 2024-12-21 11:30:18.651734133 -0500 EST m=+0.421761632 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Dec 21 11:30:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
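[Editor's note] The span above shows the role opening ports 15001-15003/tcp in firewalld, labeling them http_port_t in SELinux, and then running containers.podman.podman_play with state=created against /etc/containers/ansible-kubernetes.d/bogus.yml; the image pull fails ("unauthorized") but the pod and its infra container are still created. A hedged sketch of roughly equivalent manual steps (not the modules' literal implementation) would be:
$ firewall-cmd --permanent --add-port=15001-15003/tcp && firewall-cmd --reload
$ semanage port -a -t http_port_t -p tcp 15001-15003   # needs policycoreutils-python-utils, installed above
$ podman play kube --start=false /etc/containers/ansible-kubernetes.d/bogus.yml
  # pulling quay.io/linux-system-roles/this_is_a_bogus_image:latest fails, as the journal records,
  # yet the 'bogus' pod and its infra container exist afterwards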
Dec 21 11:30:21 managed-node2 python3.12[15679]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:30:22 managed-node2 python3.12[15816]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:30:25 managed-node2 python3.12[15949]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:27 managed-node2 python3.12[16081]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:30:27 managed-node2 python3.12[16214]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:30:28 managed-node2 python3.12[16347]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:30:30 managed-node2 python3.12[16478]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:31 managed-node2 python3.12[16610]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:33 managed-node2 python3.12[16742]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Dec 21 11:30:35 managed-node2 python3.12[16902]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Dec 21 11:30:35 managed-node2 python3.12[17033]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Dec 21 11:30:40 managed-node2 python3.12[17164]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:30:42 managed-node2 python3.12[17297]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:30:43 managed-node2 python3.12[17429]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Dec 21 11:30:43 managed-node2 python3.12[17562]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:30:44 managed-node2 python3.12[17695]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:30:44 managed-node2 python3.12[17695]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Dec 21 11:30:44 managed-node2 podman[17702]: 2024-12-21 11:30:44.166754556 -0500 EST m=+0.025898847 pod stop d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222 (image=, name=nopull) Dec 21 11:30:44 managed-node2 systemd[1]: Removed slice machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice - cgroup machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice. ░░ Subject: A stop job for unit machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222.slice has finished. ░░ ░░ The job identifier is 1822 and the job result is done. 
Dec 21 11:30:44 managed-node2 podman[17702]: 2024-12-21 11:30:44.21250215 -0500 EST m=+0.071646315 container remove f8b8aa0a43ea756c41241f4db78726f9877d7593f073592d986431bf59b6933a (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:30:44 managed-node2 podman[17702]: 2024-12-21 11:30:44.239212917 -0500 EST m=+0.098357082 container remove 2a78710994ed4ba47575e365ba77535c8e2641751f6fe675fe15b881b953afc7 (image=localhost/podman-pause:5.3.1-1733097600, name=d8bc925aaaaf-infra, pod_id=d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222, io.buildah.version=1.38.0) Dec 21 11:30:44 managed-node2 podman[17702]: 2024-12-21 11:30:44.248968909 -0500 EST m=+0.108113041 pod remove d8bc925aaaaf05748024a1eca8236513e8b0b42d75fa35be2e088660fdac8222 (image=, name=nopull) Dec 21 11:30:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:30:44 managed-node2 python3.12[17841]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:30:45 managed-node2 python3.12[17972]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:30:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
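[Editor's note] The teardown recorded above derives the podman-kube@ unit name with systemd-escape, stops that unit, removes the pod via podman_play state=absent, deletes the kube file, and prunes images. A sketch of the same sequence by hand, using the exact paths and unit name from this run (the `podman kube down` step is only an approximation of what podman_play state=absent does):
$ systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml
podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service
$ systemctl stop 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service'
$ podman kube down /etc/containers/ansible-kubernetes.d/nopull.yml
$ rm /etc/containers/ansible-kubernetes.d/nopull.yml
$ podman image prune -f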
Dec 21 11:30:48 managed-node2 python3.12[18242]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:30:49 managed-node2 python3.12[18380]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:30:53 managed-node2 python3.12[18513]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:54 managed-node2 python3.12[18645]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:30:54 managed-node2 python3.12[18778]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:30:55 managed-node2 python3.12[18911]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:30:58 managed-node2 python3.12[19042]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:30:59 managed-node2 python3.12[19174]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:31:00 managed-node2 python3.12[19306]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Dec 21 11:31:02 managed-node2 python3.12[19466]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Dec 21 11:31:03 managed-node2 python3.12[19597]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Dec 21 11:31:07 managed-node2 python3.12[19728]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:31:09 managed-node2 python3.12[19861]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:31:10 managed-node2 python3.12[19993]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Dec 21 11:31:10 managed-node2 python3.12[20126]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:31:11 managed-node2 python3.12[20259]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:31:11 managed-node2 python3.12[20259]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Dec 21 11:31:11 managed-node2 podman[20266]: 2024-12-21 11:31:11.158046209 -0500 EST m=+0.026730920 pod stop 4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a (image=, name=bogus) Dec 21 11:31:11 managed-node2 systemd[1]: Removed slice machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice - cgroup machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice. ░░ Subject: A stop job for unit machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a.slice has finished. ░░ ░░ The job identifier is 1824 and the job result is done. 
Dec 21 11:31:11 managed-node2 podman[20266]: 2024-12-21 11:31:11.199420481 -0500 EST m=+0.068105107 container remove 0fc4d258a402051284f5527e6b09250ac3414eb3c8d1b124f1f52efe50fc038e (image=localhost/podman-pause:5.3.1-1733097600, name=4570ef15e010-infra, pod_id=4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a, io.buildah.version=1.38.0) Dec 21 11:31:11 managed-node2 podman[20266]: 2024-12-21 11:31:11.20957842 -0500 EST m=+0.078263040 pod remove 4570ef15e0105deb2814075cc626ddc7d34977cf019d8e2e4e23ba7b5458482a (image=, name=bogus) Dec 21 11:31:11 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:31:11 managed-node2 python3.12[20405]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:12 managed-node2 python3.12[20536]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:31:12 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Dec 21 11:31:15 managed-node2 python3.12[20805]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:31:17 managed-node2 python3.12[20942]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:31:20 managed-node2 python3.12[21075]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:31:22 managed-node2 python3.12[21207]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:31:22 managed-node2 python3.12[21340]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:31:23 managed-node2 python3.12[21473]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:31:26 managed-node2 python3.12[21604]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:31:27 managed-node2 python3.12[21736]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:31:28 managed-node2 python3.12[21868]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Dec 21 11:31:30 managed-node2 python3.12[22028]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Dec 21 11:31:30 managed-node2 python3.12[22159]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Dec 21 11:31:35 managed-node2 python3.12[22290]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Dec 21 11:31:36 managed-node2 python3.12[22422]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:31:36 managed-node2 python3.12[22555]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:31:37 managed-node2 python3.12[22687]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:31:38 managed-node2 python3.12[22819]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:31:39 managed-node2 python3.12[22951]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Dec 21 11:31:39 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 1827. Dec 21 11:31:39 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1826. Dec 21 11:31:39 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1826. Dec 21 11:31:39 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 1906. 
Dec 21 11:31:39 managed-node2 systemd-logind[659]: New session 6 of user podman_basic_user. ░░ Subject: A new session 6 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 22955. Dec 21 11:31:39 managed-node2 (systemd)[22955]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Dec 21 11:31:39 managed-node2 systemd[22955]: Queued start job for default target default.target. Dec 21 11:31:39 managed-node2 systemd[22955]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Dec 21 11:31:39 managed-node2 systemd[22955]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Dec 21 11:31:39 managed-node2 systemd[22955]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Dec 21 11:31:39 managed-node2 systemd[22955]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Dec 21 11:31:39 managed-node2 systemd[22955]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Dec 21 11:31:39 managed-node2 systemd[22955]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Dec 21 11:31:39 managed-node2 systemd[22955]: Listening on dbus.socket - D-Bus User Message Bus Socket. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Dec 21 11:31:39 managed-node2 systemd[22955]: Startup finished in 71ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 71781 microseconds. Dec 21 11:31:39 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1906. Dec 21 11:31:40 managed-node2 python3.12[23101]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:40 managed-node2 python3.12[23232]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:41 managed-node2 sudo[23405]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mlxgabbcqgabjikhfvvgbxwcejnqigki ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798700.790551-14193-4218201843360/AnsiballZ_podman_image.py' Dec 21 11:31:41 managed-node2 sudo[23405]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23405) opened. 
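[Editor's note] The entries above show the role enabling lingering for podman_basic_user (UID 3001) so that user@3001.service and a per-user systemd manager run without an interactive login, which rootless podman relies on for its scopes and cgroups. A hedged sketch of the same preparation and checks done manually:
$ loginctl enable-linger podman_basic_user   # creates /var/lib/systemd/linger/podman_basic_user
$ getsubids podman_basic_user                # verify subuid ranges, as the role does above
$ getsubids -g podman_basic_user             # verify subgid ranges
$ sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 podman info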
Dec 21 11:31:41 managed-node2 sudo[23405]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:41 managed-node2 systemd[22955]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Dec 21 11:31:41 managed-node2 systemd[22955]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Dec 21 11:31:41 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 21 11:31:41 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 21 11:31:41 managed-node2 systemd[22955]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Dec 21 11:31:41 managed-node2 dbus-broker-launch[23428]: Ready Dec 21 11:31:41 managed-node2 systemd[22955]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Dec 21 11:31:41 managed-node2 systemd[22955]: Started podman-23415.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Dec 21 11:31:41 managed-node2 systemd[22955]: Started podman-pause-2085f4d5.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Dec 21 11:31:41 managed-node2 systemd[22955]: Started podman-23431.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Dec 21 11:31:42 managed-node2 systemd[22955]: Started podman-23456.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. 
Dec 21 11:31:42 managed-node2 sudo[23405]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:43 managed-node2 python3.12[23594]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:31:43 managed-node2 python3.12[23725]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:44 managed-node2 python3.12[23856]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:31:44 managed-node2 python3.12[23961]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798703.8676434-14342-119477966397925/.source.yml _original_basename=.hf_9mcra follow=False checksum=feb9b844cfd9411d5e266c9ae51c9bc39858ae7b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:44 managed-node2 sudo[24134]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nbzglxotblskbezkxdxfzroesmpbdbeu ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798704.6384764-14383-112561728099746/AnsiballZ_podman_play.py' Dec 21 11:31:44 managed-node2 sudo[24134]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24134) opened. Dec 21 11:31:44 managed-node2 sudo[24134]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:31:45 managed-node2 systemd[22955]: Started podman-24145.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Dec 21 11:31:45 managed-node2 systemd[22955]: Created slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice - cgroup user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Dec 21 11:31:45 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Dec 21 11:31:45 managed-node2 systemd[22955]: Started rootless-netns-08629e8e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Dec 21 11:31:45 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:45 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:31:45 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:31:45 managed-node2 systemd[22955]: Started run-r539aaca40e7043889c4c288547e1a016.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Dec 21 11:31:45 managed-node2 aardvark-dns[24228]: starting aardvark on a child with pid 24229 Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Successfully parsed config Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Listen v6 ip {} Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Dec 21 11:31:45 managed-node2 conmon[24244]: conmon db0839eafbe8b5b2b368 : failed to write to /proc/self/oom_score_adj: Permission denied Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-conmon-db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : terminal_ctrl_fd: 14 Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : winsz read side: 17, winsz write side: 18 Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd.scope - libcrun container. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : container PID: 24247 Dec 21 11:31:45 managed-node2 conmon[24249]: conmon a6748a195df11a7b054e : failed to write to /proc/self/oom_score_adj: Permission denied Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 61. Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : terminal_ctrl_fd: 13 Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : winsz read side: 16, winsz write side: 17 Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 66. Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : container PID: 24252 Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f Container: a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-12-21T11:31:45-05:00" level=info msg="/bin/podman filtering at log level debug" time="2024-12-21T11:31:45-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-12-21T11:31:45-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-12-21T11:31:45-05:00" level=info msg="Using sqlite as database backend" time="2024-12-21T11:31:45-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
time="2024-12-21T11:31:45-05:00" level=debug msg="Using graph driver overlay" time="2024-12-21T11:31:45-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2024-12-21T11:31:45-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2024-12-21T11:31:45-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2024-12-21T11:31:45-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2024-12-21T11:31:45-05:00" level=debug msg="Using transient store: false" time="2024-12-21T11:31:45-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2024-12-21T11:31:45-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2024-12-21T11:31:45-05:00" level=debug msg="Initializing event backend file" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-21T11:31:45-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-21T11:31:45-05:00" level=debug msg="Successfully loaded 1 networks" time="2024-12-21T11:31:45-05:00" level=debug msg="found free device name podman1" time="2024-12-21T11:31:45-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" 
time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="FROM \"scratch\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Check for idmapped mounts support " time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c234,c998\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container ID: 62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3" time="2024-12-21T11:31:45-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2024-12-21T11:31:45-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2024-12-21T11:31:45-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3\"" time="2024-12-21T11:31:45-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2024-12-21T11:31:45-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT 
[\"/catatonit\", \"-P\"]}" time="2024-12-21T11:31:45-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3\"" time="2024-12-21T11:31:45-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-21T11:31:45-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-21T11:31:45-05:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2024-12-21T11:31:45-05:00" level=debug msg="layer list: [\"c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701\"]" time="2024-12-21T11:31:45-05:00" level=debug msg="using \"/var/tmp/buildah502132700\" to hold temporary data" time="2024-12-21T11:31:45-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/diff" time="2024-12-21T11:31:45-05:00" level=debug msg="layer \"c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2024-12-21T11:31:45-05:00" level=debug msg="OCIv1 config = {\"created\":\"2024-12-21T16:31:45.305846253Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-21T16:31:45.27727152Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-21T16:31:45.308897263Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-21T11:31:45-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\",\"size\":684},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2024-12-21T11:31:45-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2024-12-21T16:31:45.305846253Z\",\"container\":\"62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-21T16:31:45.27727152Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-21T16:31:45.308897263Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-21T11:31:45-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1347,\"digest\":\"sha256:04b3243342b7e6e0adbf1081e9d3ced898cdc056758e229975a4bbaa2278f30f\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2024-12-21T11:31:45-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2024-12-21T11:31:45-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2024-12-21T11:31:45-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2024-12-21T11:31:45-05:00" level=debug msg=" Requirement 0: allowed" time="2024-12-21T11:31:45-05:00" level=debug msg="Overall: allowed" time="2024-12-21T11:31:45-05:00" level=debug msg="start reading config" time="2024-12-21T11:31:45-05:00" level=debug msg="finished reading config" time="2024-12-21T11:31:45-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2024-12-21T11:31:45-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2024-12-21T11:31:45-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2024-12-21T11:31:45-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-21T11:31:45-05:00" level=debug msg="No compression detected" time="2024-12-21T11:31:45-05:00" level=debug msg="Using original blob without modification" time="2024-12-21T11:31:45-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2024-12-21T11:31:45-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-21T11:31:45-05:00" level=debug msg="No compression detected" time="2024-12-21T11:31:45-05:00" level=debug msg="Compression change for blob sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Using original blob without modification" time="2024-12-21T11:31:45-05:00" level=debug msg="setting image creation date to 2024-12-21 16:31:45.305846253 +0000 UTC" time="2024-12-21T11:31:45-05:00" level=debug msg="created new image ID \"c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\" with metadata \"{}\"" time="2024-12-21T11:31:45-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-21T11:31:45-05:00" level=debug msg="printing final image id \"c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice for parent user.slice and name libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7" time="2024-12-21T11:31:45-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:31:45-05:00" level=debug msg="setting container name a1abeaf778ea-infra" time="2024-12-21T11:31:45-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network eadc88e6c881bd13256e5545b5f705fde2984752335660ec32604c485b4d82ec bridge podman1 2024-12-21 11:31:45.134348204 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-21T11:31:45-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-21T11:31:45-05:00" level=debug msg="Allocated lock 1 for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\" has run directory \"/run/user/3001/containers/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:31:45-05:00" level=debug msg="adding container to pod httpd1" time="2024-12-21T11:31:45-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2024-12-21T11:31:45-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:31:45-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /proc" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /dev" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /sys" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-21T11:31:45-05:00" level=debug msg="Allocated lock 2 for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\" has run directory \"/run/user/3001/containers/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Strongconnecting node db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="Pushed db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd onto stack" time="2024-12-21T11:31:45-05:00" level=debug msg="Finishing node db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd. Popped db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd off stack" time="2024-12-21T11:31:45-05:00" level=debug msg="Strongconnecting node a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="Pushed a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 onto stack" time="2024-12-21T11:31:45-05:00" level=debug msg="Finishing node a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18. 
Popped a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 off stack" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/CMI66WDV4TLVHMI2F4V54VJ5MC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c541,c840\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-9c865f86-001f-67d3-ac39-4d04ef79a213 for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="Mounted container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/merged\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created root filesystem for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd at /home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/merged" time="2024-12-21T11:31:45-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2024-12-21T11:31:45-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2024-12-21T11:31:45-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_eadc88e6_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "52:c1:93:d2:4b:a2", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Starting parent driver\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport1625359831/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport1625359831/.bp.sock]\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=Ready\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport is ready" time="2024-12-21T11:31:45-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:31:45-05:00" level=debug msg="Setting Cgroups for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd to 
user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice:libpod:db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:31:45-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/merged\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created OCI spec for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/config.json" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice for parent user.slice and name libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:31:45-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd -u db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata -p /run/user/3001/containers/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/pidfile -n a1abeaf778ea-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file 
--exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd]" time="2024-12-21T11:31:45-05:00" level=info msg="Running conmon under slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice and unitName libpod-conmon-db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-21T11:31:45-05:00" level=debug msg="Received: 24247" time="2024-12-21T11:31:45-05:00" level=info msg="Got Conmon PID as 24245" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd in OCI runtime" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-21T11:31:45-05:00" level=debug msg="Starting container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd with command [/catatonit -P]" time="2024-12-21T11:31:45-05:00" level=debug msg="Started container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/GEOVXMGCF5JCURGOOQCQYSNIJA,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c541,c840\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Mounted container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/merged\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created root filesystem for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 at /home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/merged" time="2024-12-21T11:31:45-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:31:45-05:00" level=debug msg="Setting Cgroups for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 to user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice:libpod:a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:31:45-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-21T11:31:45-05:00" level=debug msg="Created OCI spec for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/config.json" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup path 
user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice for parent user.slice and name libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:31:45-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 -u a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata -p /run/user/3001/containers/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18]" time="2024-12-21T11:31:45-05:00" level=info msg="Running conmon under slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice and unitName libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-21T11:31:45-05:00" level=debug msg="Received: 24252" time="2024-12-21T11:31:45-05:00" level=info msg="Got Conmon PID as 24250" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 in OCI runtime" time="2024-12-21T11:31:45-05:00" level=debug 
msg="Starting container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-21T11:31:45-05:00" level=debug msg="Started container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-12-21T11:31:45-05:00" level=debug msg="Shutting down engines" time="2024-12-21T11:31:45-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24145 Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 21 11:31:45 managed-node2 sudo[24134]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:46 managed-node2 sudo[24426]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wivgzbfahdqhicbbuvtpitoziijccoml ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798706.2267947-14448-8737191580808/AnsiballZ_systemd.py' Dec 21 11:31:46 managed-node2 sudo[24426]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24426) opened. Dec 21 11:31:46 managed-node2 sudo[24426]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:46 managed-node2 python3.12[24429]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:31:46 managed-node2 systemd[22955]: Reload requested from client PID 24430 ('systemctl')... Dec 21 11:31:46 managed-node2 systemd[22955]: Reloading... Dec 21 11:31:46 managed-node2 systemd[22955]: Reloading finished in 44 ms. Dec 21 11:31:46 managed-node2 sudo[24426]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:47 managed-node2 sudo[24613]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hdqbqkzbciigachoxfgninvtwlivcuml ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798706.9334629-14490-42851388239476/AnsiballZ_systemd.py' Dec 21 11:31:47 managed-node2 sudo[24613]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24613) opened. Dec 21 11:31:47 managed-node2 sudo[24613]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:47 managed-node2 python3.12[24616]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 21 11:31:47 managed-node2 systemd[22955]: Reload requested from client PID 24619 ('systemctl')... Dec 21 11:31:47 managed-node2 systemd[22955]: Reloading... Dec 21 11:31:47 managed-node2 systemd[22955]: Reloading finished in 44 ms. 
Dec 21 11:31:47 managed-node2 sudo[24613]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:47 managed-node2 sudo[24801]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xvacrifcbgtqxqivedbyonzgyscppauw ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798707.6772003-14530-270047551956578/AnsiballZ_systemd.py' Dec 21 11:31:47 managed-node2 sudo[24801]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24801) opened. Dec 21 11:31:47 managed-node2 sudo[24801]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:48 managed-node2 python3.12[24804]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:31:48 managed-node2 systemd[22955]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 83. Dec 21 11:31:48 managed-node2 systemd[22955]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Received SIGHUP Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Successfully parsed config Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Listen v4 ip {} Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Listen v6 ip {} Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: No configuration found stopping the sever Dec 21 11:31:48 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:48 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Dec 21 11:31:48 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Dec 21 11:31:48 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd)" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=info msg="Using sqlite as database backend" Dec 21 
11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="systemd-logind: Unknown object '/'." Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using transient store: false" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that native-diff is usable" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Initializing event backend file" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug 
msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=info msg="Setting parallel job count to 7" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd)" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Shutting down engines" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=24818 Dec 21 11:31:58 managed-node2 podman[24807]: time="2024-12-21T11:31:58-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Dec 21 11:31:58 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : container 24252 exited with status 137 Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18)" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=info msg="Using sqlite as database backend" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="systemd-logind: Unknown object '/'." Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using transient store: false" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that native-diff is usable" Dec 21 11:31:58 managed-node2 systemd[22955]: Stopping 
libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 85. Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Dec 21 11:31:58 managed-node2 systemd[22955]: Stopped libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Dec 21 11:31:58 managed-node2 systemd[22955]: Removed slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice - cgroup user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Dec 21 11:31:58 managed-node2 systemd[22955]: user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice: No such file or directory Dec 21 11:31:58 managed-node2 podman[24807]: Pods stopped: Dec 21 11:31:58 managed-node2 podman[24807]: a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f Dec 21 11:31:58 managed-node2 podman[24807]: Pods removed: Dec 21 11:31:58 managed-node2 podman[24807]: a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f Dec 21 11:31:58 managed-node2 podman[24807]: Secrets removed: Dec 21 11:31:58 managed-node2 podman[24807]: Volumes removed: Dec 21 11:31:58 managed-node2 systemd[22955]: Created slice user-libpod_pod_9556bd485cb17e948b14fa8acbafacdc25b3d8209d11a2eebe718d274f2a4356.slice - cgroup user-libpod_pod_9556bd485cb17e948b14fa8acbafacdc25b3d8209d11a2eebe718d274f2a4356.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Dec 21 11:31:58 managed-node2 systemd[22955]: Started libpod-c089df96847b506f85090c1b8671966e84959519256edf7dab9438abd05766c2.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Dec 21 11:31:58 managed-node2 systemd[22955]: Started rootless-netns-0a95b04c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. 
Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:58 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:31:58 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:31:58 managed-node2 systemd[22955]: Started run-r22328a1bdabc44c488dffe68a598f778.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Dec 21 11:31:58 managed-node2 systemd[22955]: Started libpod-adb495b2f075e8901bb800ade0cacdb24aa4b5962c9cfc594d9f7d30984191e3.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Dec 21 11:31:58 managed-node2 systemd[22955]: Started libpod-c599af12ec5a1d1a2998f0d3cb1aac751d459d1136207e789c19ad71328049a4.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Dec 21 11:31:58 managed-node2 podman[24807]: Pod: Dec 21 11:31:58 managed-node2 podman[24807]: 9556bd485cb17e948b14fa8acbafacdc25b3d8209d11a2eebe718d274f2a4356 Dec 21 11:31:58 managed-node2 podman[24807]: Container: Dec 21 11:31:58 managed-node2 podman[24807]: c599af12ec5a1d1a2998f0d3cb1aac751d459d1136207e789c19ad71328049a4 Dec 21 11:31:58 managed-node2 systemd[22955]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
Dec 21 11:31:58 managed-node2 sudo[24801]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:59 managed-node2 python3.12[25026]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 21 11:32:00 managed-node2 python3.12[25158]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:01 managed-node2 python3.12[25291]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:03 managed-node2 python3.12[25423]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:03 managed-node2 python3.12[25554]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:04 managed-node2 systemd[4479]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Dec 21 11:32:04 managed-node2 systemd[4479]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Dec 21 11:32:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:04 managed-node2 systemd[4479]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Dec 21 11:32:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Dec 21 11:32:07 managed-node2 podman[25718]: 2024-12-21 11:32:07.27244711 -0500 EST m=+2.752253327 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:07 managed-node2 python3.12[25864]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:08 managed-node2 python3.12[25995]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:08 managed-node2 python3.12[26126]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:32:09 managed-node2 python3.12[26231]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798728.406394-15466-99731724956341/.source.yml _original_basename=.zqj4emsk follow=False checksum=22d57ee085f96ddbcb2a5dc8bca5b7a52aeee580 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:09 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:32:09 managed-node2 systemd[1]: Created slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice - cgroup machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice. ░░ Subject: A start job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished successfully. ░░ ░░ The job identifier is 1990. 
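The stat/file/copy/podman_play sequence above places the kube spec under /etc/containers/ansible-kubernetes.d and plays it as root. A minimal sketch of those two steps, mirroring the module parameters shown in the log; the controller-side src path is hypothetical:

  - name: Install the httpd2 kube spec
    ansible.builtin.copy:
      src: httpd2.yml                     # hypothetical controller-side source
      dest: /etc/containers/ansible-kubernetes.d/httpd2.yml
      owner: root
      group: "0"
      mode: "0644"

  - name: Play the spec (creates pod httpd2 and container httpd2-httpd2)
    containers.podman.podman_play:
      kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
      state: started
      log_level: debug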
Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.671403128 -0500 EST m=+0.080443089 container create fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.677566753 -0500 EST m=+0.086606799 pod create a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.707533382 -0500 EST m=+0.116573407 container create 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.680552757 -0500 EST m=+0.089592781 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7286] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:09 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:32:09 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7425] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7477] device (veth0): carrier: link connected Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7483] device (podman1): carrier: link connected Dec 21 11:32:09 managed-node2 (udev-worker)[26385]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:09 managed-node2 (udev-worker)[26386]: Network interface NamePolicy= disabled on kernel command line. 
Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8035] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8040] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8047] device (podman1): Activation: starting connection 'podman1' (ecb2a905-7bae-48ad-b953-99d577d9776b) Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8048] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8095] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8097] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8099] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1997. Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8451] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8455] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1997. Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8459] device (podman1): Activation: successful, device activated. Dec 21 11:32:09 managed-node2 systemd[1]: Started run-r735556f5d4974f57a4b69c88f3c80d45.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r735556f5d4974f57a4b69c88f3c80d45.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r735556f5d4974f57a4b69c88f3c80d45.scope has finished successfully. ░░ ░░ The job identifier is 2076. 
Dec 21 11:32:09 managed-node2 aardvark-dns[26411]: starting aardvark on a child with pid 26421 Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Successfully parsed config Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Listen v6 ip {} Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope. ░░ Subject: A start job for unit libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully. ░░ ░░ The job identifier is 2082. Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : terminal_ctrl_fd: 13 Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : winsz read side: 17, winsz write side: 18 Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope - libcrun container. ░░ Subject: A start job for unit libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully. ░░ ░░ The job identifier is 2089. Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : container PID: 26428 Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.943028043 -0500 EST m=+0.352068161 container init fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.946398608 -0500 EST m=+0.355438546 container start fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope. ░░ Subject: A start job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully. ░░ ░░ The job identifier is 2096. 
Dec 21 11:32:09 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Dec 21 11:32:09 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : terminal_ctrl_fd: 12 Dec 21 11:32:09 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : winsz read side: 16, winsz write side: 17 Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope - libcrun container. ░░ Subject: A start job for unit libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully. ░░ ░░ The job identifier is 2103. Dec 21 11:32:10 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : container PID: 26433 Dec 21 11:32:10 managed-node2 podman[26369]: 2024-12-21 11:32:10.010181831 -0500 EST m=+0.419221899 container init 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 21 11:32:10 managed-node2 podman[26369]: 2024-12-21 11:32:10.013437493 -0500 EST m=+0.422477550 container start 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:10 managed-node2 podman[26369]: 2024-12-21 11:32:10.020079373 -0500 EST m=+0.429119342 pod start a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 Container: 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-12-21T11:32:09-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2024-12-21T11:32:09-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-21T11:32:09-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-12-21T11:32:09-05:00" level=info msg="Using sqlite as database backend" time="2024-12-21T11:32:09-05:00" level=debug msg="Using graph driver overlay" time="2024-12-21T11:32:09-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Using run root /run/containers/storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2024-12-21T11:32:09-05:00" level=debug msg="Using tmp dir 
/run/libpod" time="2024-12-21T11:32:09-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2024-12-21T11:32:09-05:00" level=debug msg="Using transient store: false" time="2024-12-21T11:32:09-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2024-12-21T11:32:09-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2024-12-21T11:32:09-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2024-12-21T11:32:09-05:00" level=debug msg="Initializing event backend journald" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-21T11:32:09-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-21T11:32:09-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 77beb6ebd60e75ff206e6ae63f4f50d6164d432e696e9e72f903a31005b7589f bridge podman1 2024-12-21 11:29:51.422756656 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-21T11:32:09-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug 
msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice for parent machine.slice and name libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d" time="2024-12-21T11:32:09-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:32:09-05:00" level=debug msg="setting container name a47ffa0659b6-infra" time="2024-12-21T11:32:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Allocated lock 1 for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2024-12-21T11:32:09-05:00" level=debug msg="Created container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\" has work directory \"/var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\" has run directory \"/run/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:32:09-05:00" level=debug msg="adding container to pod httpd2" time="2024-12-21T11:32:09-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2024-12-21T11:32:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:32:09-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /proc" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /dev" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /sys" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-21T11:32:09-05:00" level=debug msg="Allocated lock 2 for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\" has work directory \"/var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\" has run directory \"/run/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Strongconnecting node 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:09-05:00" level=debug msg="Pushed 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 onto stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Recursing to successor node fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="Strongconnecting node fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="Pushed fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 onto stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Finishing node fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1. Popped fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 off stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Finishing node 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70. Popped 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 off stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Made network namespace at /run/netns/netns-5e59e54c-138b-0296-4440-fd9a61a17fce for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
time="2024-12-21T11:32:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/ZXMCTYPFUO6ETCCWBEVCYNUKHB,upperdir=/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/diff,workdir=/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c418,c730\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Mounted container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\" at \"/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/merged\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created root filesystem for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 at /var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/merged" [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_77beb6eb_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "16:d0:4d:6b:ea:f8", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2024-12-21T11:32:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:32:09-05:00" level=debug msg="Setting Cgroups for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 to machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice:libpod:fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:32:09-05:00" level=debug msg="Workdir \"/\" resolved to host path 
\"/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/merged\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created OCI spec for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 at /var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/config.json" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice for parent machine.slice and name libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:32:09-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 -u fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata -p /run/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/pidfile -n a47ffa0659b6-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1]" time="2024-12-21T11:32:09-05:00" level=info msg="Running conmon under slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice and unitName libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope" time="2024-12-21T11:32:09-05:00" 
level=debug msg="Received: 26428" time="2024-12-21T11:32:09-05:00" level=info msg="Got Conmon PID as 26426" time="2024-12-21T11:32:09-05:00" level=debug msg="Created container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 in OCI runtime" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-21T11:32:09-05:00" level=debug msg="Starting container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 with command [/catatonit -P]" time="2024-12-21T11:32:09-05:00" level=debug msg="Started container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/VVWDSDX66M2DJOL5OSC7FCBPMU,upperdir=/var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/diff,workdir=/var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c418,c730\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Mounted container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\" at \"/var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/merged\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created root filesystem for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 at /var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/merged" time="2024-12-21T11:32:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:32:09-05:00" level=debug msg="Setting Cgroups for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 to machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice:libpod:9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:09-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:32:09-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-21T11:32:09-05:00" level=debug msg="Created OCI spec for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 at /var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/config.json" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice for parent machine.slice and name libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:32:09-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 
9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 -u 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata -p /run/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70]" time="2024-12-21T11:32:09-05:00" level=info msg="Running conmon under slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice and unitName libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope" time="2024-12-21T11:32:10-05:00" level=debug msg="Received: 26433" time="2024-12-21T11:32:10-05:00" level=info msg="Got Conmon PID as 26431" time="2024-12-21T11:32:10-05:00" level=debug msg="Created container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 in OCI runtime" time="2024-12-21T11:32:10-05:00" level=debug msg="Starting container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-21T11:32:10-05:00" level=debug msg="Started container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:10-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-21T11:32:10-05:00" level=debug msg="Shutting down engines" time="2024-12-21T11:32:10-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=26369 Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 21 11:32:10 managed-node2 python3.12[26565]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:32:10 managed-node2 systemd[1]: Reload requested from client PID 26566 ('systemctl') (unit session-5.scope)... Dec 21 11:32:10 managed-node2 systemd[1]: Reloading... Dec 21 11:32:10 managed-node2 systemd[1]: Reloading finished in 212 ms. Dec 21 11:32:11 managed-node2 python3.12[26752]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 21 11:32:11 managed-node2 systemd[1]: Reload requested from client PID 26755 ('systemctl') (unit session-5.scope)... Dec 21 11:32:11 managed-node2 systemd[1]: Reloading... Dec 21 11:32:11 managed-node2 systemd[1]: Reloading finished in 211 ms. Dec 21 11:32:12 managed-node2 python3.12[26941]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:32:12 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2188. Dec 21 11:32:12 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2110. Dec 21 11:32:12 managed-node2 podman[26945]: 2024-12-21 11:32:12.499598585 -0500 EST m=+0.026259019 pod stop a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:12 managed-node2 systemd[1]: libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has successfully entered the 'dead' state. 
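The daemon-reload, enable, and start steps logged above correspond to plain systemd tasks against the escaped unit name. A minimal sketch, with the unit name copied from the log:

  - name: Reload systemd to pick up generated units
    ansible.builtin.systemd:
      daemon_reload: true

  - name: Enable the httpd2 kube unit
    ansible.builtin.systemd:
      name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
      scope: system
      enabled: true

  - name: Start the httpd2 kube unit
    ansible.builtin.systemd:
      name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
      scope: system
      state: started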
Dec 21 11:32:12 managed-node2 podman[26945]: 2024-12-21 11:32:12.523956412 -0500 EST m=+0.050617110 container died fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, io.buildah.version=1.38.0) Dec 21 11:32:12 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Received SIGHUP Dec 21 11:32:12 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Dec 21 11:32:12 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Dec 21 11:32:12 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Successfully parsed config Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Listen v4 ip {} Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Listen v6 ip {} Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: No configuration found stopping the sever Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1)" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Using sqlite as database backend" Dec 21 11:32:12 managed-node2 systemd[1]: run-r735556f5d4974f57a4b69c88f3c80d45.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r735556f5d4974f57a4b69c88f3c80d45.scope has successfully entered the 'dead' state. 
Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using run root /run/containers/storage" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using tmp dir /run/libpod" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using transient store: false" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Initializing event backend journald" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: 
time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Setting parallel job count to 7" Dec 21 11:32:12 managed-node2 NetworkManager[775]: [1734798732.5706] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Dec 21 11:32:12 managed-node2 systemd[1]: run-netns-netns\x2d5e59e54c\x2d138b\x2d0296\x2d4440\x2dfd9a61a17fce.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d5e59e54c\x2d138b\x2d0296\x2d4440\x2dfd9a61a17fce.mount has successfully entered the 'dead' state. Dec 21 11:32:12 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1-userdata-shm.mount has successfully entered the 'dead' state. Dec 21 11:32:12 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5-merged.mount has successfully entered the 'dead' state. 
Dec 21 11:32:12 managed-node2 podman[26945]: 2024-12-21 11:32:12.647414868 -0500 EST m=+0.174075678 container cleanup fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1)" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Shutting down engines" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26956 Dec 21 11:32:12 managed-node2 systemd[1]: libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has successfully entered the 'dead' state. Dec 21 11:32:22 managed-node2 podman[26945]: time="2024-12-21T11:32:22-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Dec 21 11:32:22 managed-node2 systemd[1]: libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has successfully entered the 'dead' state. 
Dec 21 11:32:22 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : container 26433 exited with status 137 Dec 21 11:32:22 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice/libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope/container/memory.events Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.546530099 -0500 EST m=+10.073190512 container died 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70)" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Using sqlite as database backend" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using run root /run/containers/storage" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using tmp dir /run/libpod" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using transient store: false" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 21 11:32:22 
managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Initializing event backend journald" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Setting parallel job count to 7" Dec 21 11:32:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b-merged.mount has successfully entered the 'dead' state. 
Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.585752966 -0500 EST m=+10.112413363 container cleanup 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70)" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Shutting down engines" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26979 Dec 21 11:32:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:22 managed-node2 systemd[1]: Stopping libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope... ░░ Subject: A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has begun execution. ░░ ░░ The job identifier is 2196. Dec 21 11:32:22 managed-node2 systemd[1]: libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has successfully entered the 'dead' state. Dec 21 11:32:22 managed-node2 systemd[1]: Stopped libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope. ░░ Subject: A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished. ░░ ░░ The job identifier is 2196 and the job result is done. Dec 21 11:32:22 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Dec 21 11:32:22 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice - cgroup machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished. ░░ ░░ The job identifier is 2195 and the job result is done. Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.650966739 -0500 EST m=+10.177627079 container remove 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.679718399 -0500 EST m=+10.206378740 container remove fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:22 managed-node2 systemd[1]: machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice: Failed to open /run/systemd/transient/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice: No such file or directory Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.689270657 -0500 EST m=+10.215930985 pod remove a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:22 managed-node2 podman[26945]: Pods stopped: Dec 21 11:32:22 managed-node2 podman[26945]: a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 Dec 21 11:32:22 managed-node2 podman[26945]: Pods removed: Dec 21 11:32:22 managed-node2 podman[26945]: a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 Dec 21 11:32:22 managed-node2 podman[26945]: Secrets removed: Dec 21 11:32:22 managed-node2 podman[26945]: Volumes removed: Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.713250966 -0500 EST m=+10.239911307 container create 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 systemd[1]: Created slice machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice - cgroup machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice. ░░ Subject: A start job for unit machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice has finished successfully. ░░ ░░ The job identifier is 2197. 
Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.760070087 -0500 EST m=+10.286730418 container create 6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b (image=localhost/podman-pause:5.3.1-1733097600, name=d2f6641bb2ef-infra, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.768620734 -0500 EST m=+10.295281064 pod create d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9 (image=, name=httpd2) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.795590792 -0500 EST m=+10.322251120 container create fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.796419192 -0500 EST m=+10.323079662 container restart 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 systemd[1]: Started libpod-1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774.scope - libcrun container. ░░ Subject: A start job for unit libpod-1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774.scope has finished successfully. ░░ ░░ The job identifier is 2203. 
Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.772763403 -0500 EST m=+10.299423841 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.864475426 -0500 EST m=+10.391135881 container init 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.866845559 -0500 EST m=+10.393505965 container start 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8809] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:22 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:32:22 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8926] device (podman1): carrier: link connected Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8943] device (veth0): carrier: link connected Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8948] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Dec 21 11:32:22 managed-node2 (udev-worker)[27001]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:22 managed-node2 (udev-worker)[27000]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9312] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9317] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9324] device (podman1): Activation: starting connection 'podman1' (29f3f6df-595a-4aab-a041-3e41dcfec371) Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9326] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9329] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9331] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9333] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2210. Dec 21 11:32:22 managed-node2 systemd[1]: Started run-r88fc7918e0e342d1b6a0c330a8db1509.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r88fc7918e0e342d1b6a0c330a8db1509.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r88fc7918e0e342d1b6a0c330a8db1509.scope has finished successfully. ░░ ░░ The job identifier is 2289. Dec 21 11:32:22 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2210. Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9895] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9898] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9903] device (podman1): Activation: successful, device activated. Dec 21 11:32:23 managed-node2 systemd[1]: Started libpod-6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b.scope - libcrun container. ░░ Subject: A start job for unit libpod-6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b.scope has finished successfully. ░░ ░░ The job identifier is 2295. Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.032325647 -0500 EST m=+10.558986112 container init 6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b (image=localhost/podman-pause:5.3.1-1733097600, name=d2f6641bb2ef-infra, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.035439049 -0500 EST m=+10.562099435 container start 6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b (image=localhost/podman-pause:5.3.1-1733097600, name=d2f6641bb2ef-infra, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:23 managed-node2 systemd[1]: Started libpod-fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e.scope - libcrun container. ░░ Subject: A start job for unit libpod-fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e.scope has finished successfully. 
░░ ░░ The job identifier is 2302. Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.084962358 -0500 EST m=+10.611622739 container init fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.087780848 -0500 EST m=+10.614441278 container start fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.094305323 -0500 EST m=+10.620965733 pod start d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9 (image=, name=httpd2) Dec 21 11:32:23 managed-node2 podman[26945]: Pod: Dec 21 11:32:23 managed-node2 podman[26945]: d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9 Dec 21 11:32:23 managed-node2 podman[26945]: Container: Dec 21 11:32:23 managed-node2 podman[26945]: fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e Dec 21 11:32:23 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2110. 
Dec 21 11:32:24 managed-node2 python3.12[27181]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:25 managed-node2 python3.12[27314]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:26 managed-node2 python3.12[27446]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:26 managed-node2 python3.12[27577]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:28 managed-node2 podman[27739]: 2024-12-21 11:32:28.116444429 -0500 EST m=+0.890032782 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:28 managed-node2 python3.12[27885]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:28 managed-node2 python3.12[28016]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:29 managed-node2 python3.12[28147]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:32:29 managed-node2 python3.12[28252]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798749.203544-16314-18816282897815/.source.yml _original_basename=._3jkfmmm follow=False checksum=356b616d8bef8b884d4d415b95d12e39be295f85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:30 managed-node2 python3.12[28383]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:32:30 managed-node2 systemd[1]: Created slice machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice - cgroup machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice. ░░ Subject: A start job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished successfully. ░░ ░░ The job identifier is 2309. Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.535472432 -0500 EST m=+0.063890926 container create a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.542014213 -0500 EST m=+0.070432641 pod create 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.570810705 -0500 EST m=+0.099229197 container create 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:30 managed-node2 kernel: veth1: entered allmulticast mode Dec 21 11:32:30 managed-node2 kernel: veth1: entered promiscuous mode Dec 21 11:32:30 managed-node2 NetworkManager[775]: [1734798750.5874] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.544034506 -0500 EST m=+0.072453111 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 21 11:32:30 managed-node2 NetworkManager[775]: [1734798750.5913] device (veth1): carrier: link connected Dec 21 11:32:30 managed-node2 (udev-worker)[28410]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope. ░░ Subject: A start job for unit libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully. ░░ ░░ The job identifier is 2316. Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope - libcrun container. 
░░ Subject: A start job for unit libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully. ░░ ░░ The job identifier is 2323. Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.719560842 -0500 EST m=+0.247979403 container init a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.723001472 -0500 EST m=+0.251419992 container start a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope. ░░ Subject: A start job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully. ░░ ░░ The job identifier is 2330. Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope - libcrun container. ░░ Subject: A start job for unit libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully. ░░ ░░ The job identifier is 2337. 
Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.792451436 -0500 EST m=+0.320869970 container init 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.795390308 -0500 EST m=+0.323808836 container start 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.801447057 -0500 EST m=+0.329865572 pod start 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:31 managed-node2 python3.12[28571]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:32:31 managed-node2 systemd[1]: Reload requested from client PID 28572 ('systemctl') (unit session-5.scope)... Dec 21 11:32:31 managed-node2 systemd[1]: Reloading... Dec 21 11:32:31 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 21 11:32:31 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2344. Dec 21 11:32:31 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Dec 21 11:32:31 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2344. Dec 21 11:32:32 managed-node2 python3.12[28762]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 21 11:32:32 managed-node2 systemd[1]: Reload requested from client PID 28765 ('systemctl') (unit session-5.scope)... Dec 21 11:32:32 managed-node2 systemd[1]: Reloading... Dec 21 11:32:32 managed-node2 systemd[1]: Reloading finished in 213 ms. Dec 21 11:32:33 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Dec 21 11:32:33 managed-node2 python3.12[28951]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:32:33 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2422. Dec 21 11:32:33 managed-node2 podman[28956]: 2024-12-21 11:32:33.242876627 -0500 EST m=+0.025638890 pod stop 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:33 managed-node2 systemd[1]: libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has successfully entered the 'dead' state. Dec 21 11:32:33 managed-node2 podman[28956]: 2024-12-21 11:32:33.263225055 -0500 EST m=+0.045987552 container died a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, io.buildah.version=1.38.0) Dec 21 11:32:33 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:33 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Dec 21 11:32:33 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Dec 21 11:32:33 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:33 managed-node2 systemd[1]: run-netns-netns\x2d4e373f23\x2d58bf\x2df7fc\x2da36b\x2dd13613a2791c.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d4e373f23\x2d58bf\x2df7fc\x2da36b\x2dd13613a2791c.mount has successfully entered the 'dead' state. Dec 21 11:32:33 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875-userdata-shm.mount has successfully entered the 'dead' state. Dec 21 11:32:33 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0a2ca7ba44d782d944ef1485a30faf82fbf1d196b6a7a05d8f71aa13fa0d892f-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-0a2ca7ba44d782d944ef1485a30faf82fbf1d196b6a7a05d8f71aa13fa0d892f-merged.mount has successfully entered the 'dead' state. 
Dec 21 11:32:33 managed-node2 podman[28956]: 2024-12-21 11:32:33.34672823 -0500 EST m=+0.129490409 container cleanup a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:33 managed-node2 systemd[1]: libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has successfully entered the 'dead' state. Dec 21 11:32:43 managed-node2 podman[28956]: time="2024-12-21T11:32:43-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Dec 21 11:32:43 managed-node2 systemd[1]: libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has successfully entered the 'dead' state. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.289656672 -0500 EST m=+10.072419071 container died 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 21 11:32:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay-90f1665c6d14610807025945bb1c5bf0a44b71c87ac6b1a95b1dcb8c357cde09-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-90f1665c6d14610807025945bb1c5bf0a44b71c87ac6b1a95b1dcb8c357cde09-merged.mount has successfully entered the 'dead' state. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.32683333 -0500 EST m=+10.109595630 container cleanup 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:43 managed-node2 systemd[1]: Stopping libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope... ░░ Subject: A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has begun execution. ░░ ░░ The job identifier is 2508. Dec 21 11:32:43 managed-node2 systemd[1]: libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has successfully entered the 'dead' state. 
Dec 21 11:32:43 managed-node2 systemd[1]: Stopped libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope. ░░ Subject: A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished. ░░ ░░ The job identifier is 2508 and the job result is done. Dec 21 11:32:43 managed-node2 systemd[1]: Removed slice machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice - cgroup machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice. ░░ Subject: A stop job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished. ░░ ░░ The job identifier is 2507 and the job result is done. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.33522454 -0500 EST m=+10.117986798 pod stop 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 systemd[1]: machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: Failed to open /run/systemd/transient/machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: No such file or directory Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.342340383 -0500 EST m=+10.125102713 pod stop 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 systemd[1]: machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: Failed to open /run/systemd/transient/machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: No such file or directory Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.369732065 -0500 EST m=+10.152494255 container remove 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.396911901 -0500 EST m=+10.179674112 container remove a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 systemd[1]: machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: Failed to open /run/systemd/transient/machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: No such file or directory Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.405993365 -0500 EST m=+10.188755541 pod remove 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 podman[28956]: Pods stopped: Dec 21 11:32:43 managed-node2 podman[28956]: 
0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 Dec 21 11:32:43 managed-node2 podman[28956]: Pods removed: Dec 21 11:32:43 managed-node2 podman[28956]: 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 Dec 21 11:32:43 managed-node2 podman[28956]: Secrets removed: Dec 21 11:32:43 managed-node2 podman[28956]: Volumes removed: Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.431075178 -0500 EST m=+10.213837357 container create 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 systemd[1]: Created slice machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice - cgroup machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice. ░░ Subject: A start job for unit machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice has finished successfully. ░░ ░░ The job identifier is 2509. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.470053048 -0500 EST m=+10.252815223 container create 2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009 (image=localhost/podman-pause:5.3.1-1733097600, name=26cc0fa7c809-infra, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.476236336 -0500 EST m=+10.258998514 pod create 26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.478414019 -0500 EST m=+10.261176309 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.504509835 -0500 EST m=+10.287272011 container create a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.50489022 -0500 EST m=+10.287652408 container restart 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 systemd[1]: Started libpod-61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e.scope - libcrun container. 
░░ Subject: A start job for unit libpod-61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e.scope has finished successfully. ░░ ░░ The job identifier is 2515. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.548961324 -0500 EST m=+10.331723546 container init 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.551487902 -0500 EST m=+10.334250212 container start 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:43 managed-node2 kernel: veth1: entered allmulticast mode Dec 21 11:32:43 managed-node2 kernel: veth1: entered promiscuous mode Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 21 11:32:43 managed-node2 NetworkManager[775]: [1734798763.5765] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Dec 21 11:32:43 managed-node2 NetworkManager[775]: [1734798763.5809] device (veth1): carrier: link connected Dec 21 11:32:43 managed-node2 (udev-worker)[29003]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:43 managed-node2 systemd[1]: Started libpod-2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009.scope - libcrun container. ░░ Subject: A start job for unit libpod-2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009.scope has finished successfully. ░░ ░░ The job identifier is 2522. 
Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.674483419 -0500 EST m=+10.457245731 container init 2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009 (image=localhost/podman-pause:5.3.1-1733097600, name=26cc0fa7c809-infra, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.677263682 -0500 EST m=+10.460025936 container start 2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009 (image=localhost/podman-pause:5.3.1-1733097600, name=26cc0fa7c809-infra, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 systemd[1]: Started libpod-a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c.scope - libcrun container. ░░ Subject: A start job for unit libpod-a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c.scope has finished successfully. ░░ ░░ The job identifier is 2529. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.726895569 -0500 EST m=+10.509657828 container init a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.729387453 -0500 EST m=+10.512149712 container start a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.735502247 -0500 EST m=+10.518264438 pod start 26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 podman[28956]: Pod: Dec 21 11:32:43 managed-node2 podman[28956]: 26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199 Dec 21 11:32:43 managed-node2 podman[28956]: Container: Dec 21 11:32:43 managed-node2 podman[28956]: a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c Dec 21 11:32:43 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. 
░░ ░░ The job identifier is 2422. Dec 21 11:32:44 managed-node2 sudo[29207]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fvyxrsktvufjdvxghfwczcfatmaofusl ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798764.2274415-16922-188999455305959/AnsiballZ_command.py' Dec 21 11:32:44 managed-node2 sudo[29207]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29207) opened. Dec 21 11:32:44 managed-node2 sudo[29207]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:32:44 managed-node2 python3.12[29210]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:44 managed-node2 systemd[22955]: Started podman-29218.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. Dec 21 11:32:44 managed-node2 sudo[29207]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:32:45 managed-node2 python3.12[29358]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:45 managed-node2 python3.12[29498]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:45 managed-node2 sudo[29679]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qgioeadtlhugbehxziajjnovvcezkswg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798765.7554495-16999-94045399399875/AnsiballZ_command.py' Dec 21 11:32:45 managed-node2 sudo[29679]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29679) opened. 
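The entries around here show the test spot-checking each kube-played pod, its template unit, and the HTTP endpoint it serves. An equivalent manual check, using the pod name, unit pattern, and port recorded in these log entries, might look like:

  # Confirm the pod created by podman kube play is running
  podman pod inspect httpd1 --format '{{.State}}'
  # Confirm the templated service is loaded and active (rootless pods are checked with --user)
  systemctl --user list-units -a -l --plain | grep 'podman-kube@.*httpd1'
  # Fetch the test page served by the pod
  curl http://localhost:15001/index.txt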
Dec 21 11:32:45 managed-node2 sudo[29679]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:32:46 managed-node2 python3.12[29682]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:46 managed-node2 sudo[29679]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:32:46 managed-node2 python3.12[29816]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:46 managed-node2 python3.12[29950]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:47 managed-node2 python3.12[30084]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:48 managed-node2 python3.12[30216]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:48 managed-node2 python3.12[30347]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:49 managed-node2 python3.12[30479]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None 
group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:49 managed-node2 python3.12[30610]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:51 managed-node2 python3.12[30784]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Dec 21 11:32:53 managed-node2 python3.12[30957]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:53 managed-node2 python3.12[31088]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:32:56 managed-node2 python3.12[31224]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:32:57 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. 
However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 systemd[1]: Reload requested from client PID 31232 ('systemctl') (unit session-5.scope)... Dec 21 11:32:57 managed-node2 systemd[1]: Reloading... Dec 21 11:32:57 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 21 11:32:57 managed-node2 systemd[1]: Started run-r6ee5ccc8ad3942f095fb125ebd648961.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r6ee5ccc8ad3942f095fb125ebd648961.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r6ee5ccc8ad3942f095fb125ebd648961.service has finished successfully. ░░ ░░ The job identifier is 2540. Dec 21 11:32:58 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2618. Dec 21 11:32:58 managed-node2 systemd[1]: Reload requested from client PID 31296 ('systemctl') (unit session-5.scope)... Dec 21 11:32:58 managed-node2 systemd[1]: Reloading... Dec 21 11:32:58 managed-node2 systemd[1]: Reloading finished in 363 ms. Dec 21 11:32:58 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Dec 21 11:32:58 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Dec 21 11:32:58 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2618. Dec 21 11:32:58 managed-node2 systemd[1]: run-r6ee5ccc8ad3942f095fb125ebd648961.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r6ee5ccc8ad3942f095fb125ebd648961.service has successfully entered the 'dead' state. 
Dec 21 11:32:59 managed-node2 python3.12[31489]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:59 managed-node2 python3.12[31620]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:00 managed-node2 python3.12[31751]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:33:00 managed-node2 systemd[1]: Reload requested from client PID 31754 ('systemctl') (unit session-5.scope)... Dec 21 11:33:00 managed-node2 systemd[1]: Reloading... Dec 21 11:33:00 managed-node2 systemd[1]: Reloading finished in 209 ms. Dec 21 11:33:00 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2696. Dec 21 11:33:00 managed-node2 (rtmonger)[31811]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Dec 21 11:33:00 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2696. 
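The certificate role has just created its hook directories and brought up certmonger. A rough shell equivalent of those steps, using the paths, mode, and service state recorded above:

  # Hook directories the role uses for pre/post issuance scripts
  mkdir -p /etc/certmonger/pre-scripts /etc/certmonger/post-scripts
  chmod 0700 /etc/certmonger/pre-scripts /etc/certmonger/post-scripts
  # Enable and start the certmonger daemon that will track the request
  systemctl enable --now certmonger.service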
Dec 21 11:33:01 managed-node2 python3.12[31969]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 rsyslogd[658]: imjournal: journal files changed, reloading... 
[v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31985]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
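At this point certmonger has issued the self-signed certificate into /etc/pki/tls. A quick way to confirm what was issued and whether it is still being tracked; these are standard certmonger and OpenSSL commands, not taken from this log:

  # Show certmonger's tracking entry for the certificate file
  getcert list -f /etc/pki/tls/certs/quadlet_demo.crt
  # Inspect the certificate itself; subject and DNS SAN should be 'localhost' per the request above
  openssl x509 -in /etc/pki/tls/certs/quadlet_demo.crt -noout -subject -ext subjectAltName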
Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:02 managed-node2 python3.12[32116]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 21 11:33:02 managed-node2 python3.12[32247]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Dec 21 11:33:03 managed-node2 python3.12[32378]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 21 11:33:03 managed-node2 python3.12[32509]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:33:03 managed-node2 certmonger[31811]: 2024-12-21 11:33:03 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:04 managed-node2 python3.12[32641]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:04 managed-node2 python3.12[32772]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:04 managed-node2 python3.12[32903]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:05 managed-node2 python3.12[33034]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:05 managed-node2 python3.12[33165]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:07 managed-node2 python3.12[33427]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:33:08 managed-node2 python3.12[33564]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 21 11:33:09 managed-node2 python3.12[33696]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:11 managed-node2 python3.12[33829]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:11 managed-node2 python3.12[33960]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:12 managed-node2 python3.12[34091]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:33:13 managed-node2 python3.12[34223]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:33:14 managed-node2 python3.12[34356]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:33:15 managed-node2 python3.12[34489]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:33:15 managed-node2 python3.12[34620]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:33:20 managed-node2 python3.12[35227]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:22 managed-node2 python3.12[35360]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:22 managed-node2 python3.12[35491]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:23 managed-node2 python3.12[35596]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798802.6458833-18916-273275758786371/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False 
checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:24 managed-node2 python3.12[35727]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:24 managed-node2 systemd[1]: Reload requested from client PID 35728 ('systemctl') (unit session-5.scope)... Dec 21 11:33:24 managed-node2 systemd[1]: Reloading... Dec 21 11:33:24 managed-node2 systemd[1]: Reloading finished in 219 ms. Dec 21 11:33:24 managed-node2 python3.12[35915]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:33:24 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2775. Dec 21 11:33:24 managed-node2 quadlet-demo-network[35919]: systemd-quadlet-demo Dec 21 11:33:24 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2775. Dec 21 11:33:25 managed-node2 python3.12[36057]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:27 managed-node2 python3.12[36190]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:28 managed-node2 python3.12[36321]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:28 managed-node2 python3.12[36426]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798807.9232495-19193-85517954084435/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:29 managed-node2 python3.12[36557]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:29 managed-node2 systemd[1]: Reload requested from client PID 36558 ('systemctl') (unit session-5.scope)... Dec 21 11:33:29 managed-node2 systemd[1]: Reloading... 
Dec 21 11:33:29 managed-node2 systemd[1]: Reloading finished in 208 ms. Dec 21 11:33:30 managed-node2 python3.12[36744]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:33:30 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2859. Dec 21 11:33:30 managed-node2 podman[36748]: 2024-12-21 11:33:30.173025703 -0500 EST m=+0.026031217 volume create systemd-quadlet-demo-mysql Dec 21 11:33:30 managed-node2 quadlet-demo-mysql-volume[36748]: systemd-quadlet-demo-mysql Dec 21 11:33:30 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2859. Dec 21 11:33:31 managed-node2 python3.12[36887]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:32 managed-node2 python3.12[37020]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:39 managed-node2 podman[37160]: 2024-12-21 11:33:39.053601702 -0500 EST m=+5.698706357 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Dec 21 11:33:39 managed-node2 python3.12[37474]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:39 managed-node2 python3.12[37605]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:40 managed-node2 python3.12[37710]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798819.634735-19706-34961030233952/.source.container _original_basename=.ahp1bvs1 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:40 managed-node2 python3.12[37841]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None 
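The quadlet-demo-network and quadlet-demo-mysql-volume services above are generated by Quadlet from the .network and .volume files dropped into /etc/containers/systemd. Their actual contents are not reproduced in this log; a minimal illustrative pair that would yield the object names seen here (network systemd-quadlet-demo and volume systemd-quadlet-demo-mysql, the 'systemd-' prefix being Quadlet's default naming) could be as small as:

  # /etc/containers/systemd/quadlet-demo.network (illustrative; real contents not shown in this log)
  cat > /etc/containers/systemd/quadlet-demo.network <<'EOF'
  [Network]
  EOF
  # /etc/containers/systemd/quadlet-demo-mysql.volume (illustrative)
  cat > /etc/containers/systemd/quadlet-demo-mysql.volume <<'EOF'
  [Volume]
  EOF
  # Regenerate the units and start them, as the role does through systemd
  systemctl daemon-reload
  systemctl start quadlet-demo-network.service quadlet-demo-mysql-volume.service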
Dec 21 11:33:40 managed-node2 systemd[1]: Reload requested from client PID 37842 ('systemctl') (unit session-5.scope)... Dec 21 11:33:40 managed-node2 systemd[1]: Reloading... Dec 21 11:33:41 managed-node2 systemd[1]: Reloading finished in 223 ms. Dec 21 11:33:41 managed-node2 python3.12[38028]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:33:41 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2943. Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.671006526 -0500 EST m=+0.041799247 container create 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7008] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Dec 21 11:33:41 managed-node2 kernel: veth2: entered allmulticast mode Dec 21 11:33:41 managed-node2 kernel: veth2: entered promiscuous mode Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7164] device (veth2): carrier: link connected Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7167] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7177] device (podman2): carrier: link connected Dec 21 11:33:41 managed-node2 (udev-worker)[38047]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:33:41 managed-node2 (udev-worker)[38046]: Network interface NamePolicy= disabled on kernel command line. 
Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.654759215 -0500 EST m=+0.025552067 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7687] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7707] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7718] device (podman2): Activation: starting connection 'podman2' (f08971b4-089a-4771-92e2-3a619f890dc5) Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7719] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7725] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7729] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7732] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3030. Dec 21 11:33:41 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3030. Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.8087] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.8089] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.8099] device (podman2): Activation: successful, device activated. Dec 21 11:33:41 managed-node2 systemd[1]: Started 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer - /usr/bin/podman healthcheck run 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474. ░░ Subject: A start job for unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer has finished successfully. ░░ ░░ The job identifier is 3109. 
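quadlet-demo-mysql.service, its transient health-check timer, and the podman2 bridge come from the quadlet-demo-mysql.container unit installed a few entries earlier. Its real contents are not shown in this log; an illustrative sketch consistent with what the journal records (the mysql:5.6 image, the container name, the quadlet-defined network and volume, and a configured health check) might look like:

  cat > /etc/containers/systemd/quadlet-demo-mysql.container <<'EOF'
  [Container]
  Image=quay.io/linux-system-roles/mysql:5.6
  ContainerName=quadlet-demo-mysql
  Network=quadlet-demo.network
  # Mount path is an assumption; only the volume name appears in this log
  Volume=quadlet-demo-mysql.volume:/var/lib/mysql
  # A health check is what produces the per-container *.timer seen above (command is illustrative)
  HealthCmd=/usr/bin/true
  [Install]
  WantedBy=multi-user.target
  EOF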
Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.870393132 -0500 EST m=+0.241186044 container init 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:41 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2943. Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.897984688 -0500 EST m=+0.268777553 container start 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:41 managed-node2 quadlet-demo-mysql[38032]: 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 Dec 21 11:33:42 managed-node2 podman[38090]: 2024-12-21 11:33:42.052326045 -0500 EST m=+0.141002444 container health_status 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:42 managed-node2 python3.12[38285]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:44 managed-node2 python3.12[38429]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:44 managed-node2 python3.12[38560]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:44 managed-node2 python3.12[38665]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798824.4087186-19912-106128108103025/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:45 managed-node2 python3.12[38820]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:45 managed-node2 systemd[1]: Reload requested from client PID 38821 ('systemctl') (unit session-5.scope)... Dec 21 11:33:45 managed-node2 systemd[1]: Reloading... Dec 21 11:33:45 managed-node2 systemd[1]: Reloading finished in 230 ms. 
Dec 21 11:33:46 managed-node2 python3.12[39008]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:48 managed-node2 python3.12[39178]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:48 managed-node2 python3.12[39309]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:49 managed-node2 python3.12[39414]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798828.4374363-20045-16704362976477/.source.yml _original_basename=.8i5lgawn follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:50 managed-node2 python3.12[39552]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:50 managed-node2 systemd[1]: Reload requested from client PID 39553 ('systemctl') (unit session-5.scope)... Dec 21 11:33:50 managed-node2 systemd[1]: Reloading... Dec 21 11:33:50 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 21 11:33:51 managed-node2 python3.12[39740]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:51 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Dec 21 11:33:51 managed-node2 python3.12[39875]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Dec 21 11:33:52 managed-node2 python3.12[40028]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:53 managed-node2 python3.12[40159]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:04 managed-node2 podman[40299]: 2024-12-21 11:34:04.905781344 -0500 EST m=+11.319455823 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Dec 21 11:34:09 managed-node2 systemd[22955]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 118. Dec 21 11:34:10 managed-node2 systemd[22955]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 118. 
Dec 21 11:34:10 managed-node2 podman[40718]: 2024-12-21 11:34:10.382566219 -0500 EST m=+5.015616934 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Dec 21 11:34:10 managed-node2 python3.12[40983]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:11 managed-node2 python3.12[41114]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:34:11 managed-node2 python3.12[41219]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798850.9322016-20524-56637568013428/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:12 managed-node2 python3.12[41350]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:34:12 managed-node2 systemd[1]: Reload requested from client PID 41351 ('systemctl') (unit session-5.scope)... Dec 21 11:34:12 managed-node2 systemd[1]: Reloading... Dec 21 11:34:12 managed-node2 podman[41352]: 2024-12-21 11:34:12.317135096 -0500 EST m=+0.156343617 container health_status 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:34:12 managed-node2 systemd[1]: Reloading finished in 271 ms. Dec 21 11:34:12 managed-node2 python3.12[41553]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:34:12 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3343. 
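quadlet-demo.service is generated from the quadlet-demo.kube unit copied just above, which points Quadlet's kube handler at the quadlet-demo.yml workload and the envoy-proxy-configmap.yml installed earlier. The actual unit is not reproduced in this log; an illustrative sketch using standard [Kube] keys (the published ports are an assumption based on the 8000/tcp and 9000/tcp firewall openings and the later https://localhost:8000 check) might be:

  cat > /etc/containers/systemd/quadlet-demo.kube <<'EOF'
  [Kube]
  Yaml=quadlet-demo.yml
  ConfigMap=envoy-proxy-configmap.yml
  Network=quadlet-demo.network
  # Assumed port mappings; not recorded in this log
  PublishPort=8000:8000
  PublishPort=9000:9000
  [Install]
  WantedBy=multi-user.target
  EOF
  systemctl daemon-reload
  systemctl start quadlet-demo.service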
Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Pods stopped: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Pods removed: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Secrets removed: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Volumes removed: Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.046407256 -0500 EST m=+0.035120762 volume create wp-pv-claim Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.07359806 -0500 EST m=+0.062311564 container create 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.081892004 -0500 EST m=+0.070605523 volume create envoy-proxy-config Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.088568557 -0500 EST m=+0.077282063 volume create envoy-certificates Dec 21 11:34:13 managed-node2 systemd[1]: Created slice machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice - cgroup machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice. ░░ Subject: A start job for unit machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice has finished successfully. ░░ ░░ The job identifier is 3430. Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.136860015 -0500 EST m=+0.125573521 container create a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.142994513 -0500 EST m=+0.131708015 pod create a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 (image=, name=quadlet-demo) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.184907236 -0500 EST m=+0.173620880 container create ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.214657838 -0500 EST m=+0.203371344 container create d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.21502922 -0500 EST m=+0.203742738 container restart 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.152599938 -0500 EST m=+0.141313669 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.18931206 
-0500 EST m=+0.178025706 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope - libcrun container. ░░ Subject: A start job for unit libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope has finished successfully. ░░ ░░ The job identifier is 3436. Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.265681715 -0500 EST m=+0.254395360 container init 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.269159348 -0500 EST m=+0.257872902 container start 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Dec 21 11:34:13 managed-node2 kernel: veth3: entered allmulticast mode Dec 21 11:34:13 managed-node2 kernel: veth3: entered promiscuous mode Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state Dec 21 11:34:13 managed-node2 NetworkManager[775]: [1734798853.3044] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11) Dec 21 11:34:13 managed-node2 NetworkManager[775]: [1734798853.3057] device (veth3): carrier: link connected Dec 21 11:34:13 managed-node2 (udev-worker)[41577]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope - libcrun container. ░░ Subject: A start job for unit libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope has finished successfully. ░░ ░░ The job identifier is 3443. 
Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.392061159 -0500 EST m=+0.380774732 container init a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.395158122 -0500 EST m=+0.383871707 container start a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope - libcrun container. ░░ Subject: A start job for unit libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope has finished successfully. ░░ ░░ The job identifier is 3450. Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.45996148 -0500 EST m=+0.448675022 container init ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.462528439 -0500 EST m=+0.451242015 container start ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope - libcrun container. ░░ Subject: A start job for unit libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope has finished successfully. ░░ ░░ The job identifier is 3457. 
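The journal entries above and below record quadlet-demo.service bringing the demo pod up: podman creates the volumes, the pod infra container, and the wordpress and envoy containers, and systemd starts a libpod-*.scope for each, all tagged with PODMAN_SYSTEMD_UNIT=quadlet-demo.service. When only this slice of the journal is of interest it can be collected per unit; the task below is an illustrative sketch, and the line count and registered variable name are assumptions, not values taken from this test:

    - name: Collect the quadlet-demo.service journal (illustrative sketch)
      ansible.builtin.command:
        cmd: journalctl -u quadlet-demo.service --no-pager -n 100
      register: __quadlet_demo_journal  # hypothetical variable name
      changed_when: false               # read-only diagnostic command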
Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.536515665 -0500 EST m=+0.525229213 container init d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.538973229 -0500 EST m=+0.527686824 container start d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.546111037 -0500 EST m=+0.534824617 pod start a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 (image=, name=quadlet-demo) Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Volumes: Dec 21 11:34:13 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 3343. Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: wp-pv-claim Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Pod: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Containers: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 Dec 21 11:34:14 managed-node2 python3.12[41820]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:14 managed-node2 python3.12[42027]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:15 managed-node2 python3.12[42217]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:15 managed-node2 python3.12[42356]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:16 managed-node2 python3.12[42495]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:16 managed-node2 python3.12[42629]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False 
http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:22 managed-node2 python3.12[42760]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:27 managed-node2 python3.12[42891]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:33 managed-node2 python3.12[43022]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:38 managed-node2 python3.12[43153]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:43 managed-node2 podman[43175]: 2024-12-21 11:34:43.112259851 -0500 EST m=+0.102896688 container health_status 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:34:44 managed-node2 python3.12[43300]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None 
serole=None selevel=None setype=None attributes=None
Dec 21 11:34:49 managed-node2 python3.12[43431]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 21 11:34:49 managed-node2 python3.12[43562]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
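The repeated ansible-get_url entries in the journal above correspond to the test polling https://localhost:8000 until the freshly started quadlet-demo stack answers. A minimal sketch of such a polling step is shown below; the retry count, delay, and registered variable name are illustrative assumptions rather than values taken from the test's source:

    - name: Wait until the quadlet demo endpoint responds (illustrative sketch)
      ansible.builtin.get_url:
        url: https://localhost:8000
        dest: /run/out
        mode: "0600"
        validate_certs: false
      register: __demo_endpoint        # hypothetical variable name
      until: __demo_endpoint is success
      retries: 10
      delay: 5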
TASK [Check] *******************************************************************
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
Saturday 21 December 2024 11:34:50 -0500 (0:00:00.504) 0:01:59.288 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.038957", "end": "2024-12-21 11:34:50.528112", "rc": 0, "start": "2024-12-21 11:34:50.489155" }
STDOUT:
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes ce41ff3061ea-service
6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra
fb676e5504a3 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2
61716cd30289 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes a8db008d7cc8-service
2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra
a6061cf8dd2a quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3
203d365a6d08 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
9f1e833e8645 localhost/podman-pause:5.3.1-1733097600 37 seconds ago Up 37 seconds a96f3a51b8d1-service
a0a9c1950f0b localhost/podman-pause:5.3.1-1733097600 37 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp a20a2c426a00-infra
ab1ed7027307 quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 37 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress
d62af06fe80a quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 37 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy
TASK [Check pods] **************************************************************
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152
Saturday 21 December 2024 11:34:50 -0500 (0:00:00.440) 0:01:59.728 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.039054", "end": "2024-12-21 11:34:50.971861", "failed_when_result": false, "rc": 0, "start": "2024-12-21 11:34:50.932807" }
STDOUT:
POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS
a20a2c426a00 quadlet-demo Running 37 seconds ago a0a9c1950f0b a0a9c1950f0b,ab1ed7027307,d62af06fe80a a20a2c426a00-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running
26cc0fa7c809 httpd3 Running 2 minutes ago 2907e4388cf2 2907e4388cf2,a6061cf8dd2a 26cc0fa7c809-infra,httpd3-httpd3 running,running
d2f6641bb2ef httpd2 Running 2 minutes ago 6232f80e7e24 6232f80e7e24,fb676e5504a3 d2f6641bb2ef-infra,httpd2-httpd2 running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157
Saturday 21 December 2024 11:34:51 -0500 (0:00:00.418) 0:02:00.146 *****
ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.015330", "end": "2024-12-21 11:34:51.361606", "failed_when_result": false, "rc": 0, "start": "2024-12-21 11:34:51.346276" }
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
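The four quadlet-demo-* units listed above are not hand-written unit files: Quadlet generates them at systemd daemon-reload time from the quadlet files (for example .container, .volume, .network, .kube) that the role installed under /etc/containers/systemd. When a generated unit looks wrong, the generator can be run by hand to preview its output; the task below is an illustrative sketch, and the libexec path is the usual Fedora/CentOS location rather than something taken from this log:

    - name: Preview the units Quadlet would generate (illustrative sketch)
      ansible.builtin.command:
        cmd: /usr/libexec/podman/quadlet -dryrun
      register: __quadlet_dryrun  # hypothetical variable name
      changed_when: false         # inspection only, nothing is modified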
TASK [LS] **********************************************************************
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165
Saturday 21 December 2024 11:34:51 -0500 (0:00:00.390) 0:02:00.536 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.005154", "end": "2024-12-21 11:34:51.737530", "failed_when_result": false, "rc": 0, "start": "2024-12-21 11:34:51.732376" }
STDOUT:
total 12
drwxr-xr-x. 5 root root 47 Dec 20 02:21 ../
lrwxrwxrwx. 1 root root 43 Dec 20 02:21 dbus.service -> /usr/lib/systemd/system/dbus-broker.service
drwxr-xr-x. 2 root root 32 Dec 20 02:21 getty.target.wants/
lrwxrwxrwx. 1 root root 37 Dec 20 02:21 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
drwxr-xr-x. 2 root root 48 Dec 20 02:22 network-online.target.wants/
lrwxrwxrwx. 1 root root 57 Dec 20 02:22 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x. 2 root root 76 Dec 20 02:22 timers.target.wants/
drwxr-xr-x. 2 root root 38 Dec 20 02:22 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
lrwxrwxrwx. 1 root root 41 Dec 20 02:25 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x. 2 root root 31 Dec 20 02:37 remote-fs.target.wants/
drwxr-xr-x. 2 root root 119 Dec 20 02:38 cloud-init.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 20 02:38 sysinit.target.wants/
drwxr-xr-x. 2 root root 113 Dec 21 11:29 sockets.target.wants/
lrwxrwxrwx. 1 root root 41 Dec 21 11:29 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 12 root root 4096 Dec 21 11:32 ./
drwxr-xr-x. 2 root root 162 Dec 21 11:32 default.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 21 11:33 multi-user.target.wants/
TASK [Cleanup] *****************************************************************
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172
Saturday 21 December 2024 11:34:51 -0500 (0:00:00.373) 0:02:00.910 *****
included: fedora.linux_system_roles.podman for managed-node2
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 21 December 2024 11:34:51 -0500 (0:00:00.074) 0:02:00.985 *****
included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 21 December 2024 11:34:51 -0500 (0:00:00.090) 0:02:01.075 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 21 December 2024 11:34:52 -0500 (0:00:00.036) 0:02:01.112 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 21 December 2024 11:34:52 -0500 (0:00:00.030) 0:02:01.142 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 21 December 2024 11:34:52 -0500 (0:00:00.031) 0:02:01.173 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 21 December 2024 11:34:52 -0500 (0:00:00.034) 0:02:01.208 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 21 December 2024 11:34:52 -0500 (0:00:00.029) 0:02:01.237 *****
ok: [managed-node2] =>
(item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 21 December 2024 11:34:52 -0500 (0:00:00.070) 0:02:01.308 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 21 December 2024 11:34:52 -0500 (0:00:00.755) 0:02:02.063 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 21 December 2024 11:34:52 -0500 (0:00:00.030) 0:02:02.094 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.034) 0:02:02.128 ***** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.029) 0:02:02.158 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.032) 0:02:02.191 ***** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.033) 0:02:02.225 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024657", "end": "2024-12-21 11:34:53.440272", "rc": 0, "start": "2024-12-21 11:34:53.415615" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.390) 0:02:02.615 ***** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.032) 0:02:02.647 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.029) 0:02:02.677 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.034) 0:02:02.712 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.037) 0:02:02.750 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.062) 0:02:02.812 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, 
continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.107) 0:02:02.920 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.070) 0:02:02.991 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.037) 0:02:03.028 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.034) 0:02:03.062 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:34:53 -0500 (0:00:00.041) 0:02:03.103 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.394) 0:02:03.498 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 
2024 11:34:54 -0500 (0:00:00.029) 0:02:03.527 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.030) 0:02:03.557 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.028) 0:02:03.586 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.028) 0:02:03.615 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.032) 0:02:03.647 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.034) 0:02:03.682 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.034) 0:02:03.716 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.037) 0:02:03.753 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.041) 0:02:03.795 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.056) 0:02:03.852 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.030) 0:02:03.883 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.029) 0:02:03.912 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.121) 0:02:04.033 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.037) 0:02:04.071 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 21 December 2024 11:34:54 -0500 (0:00:00.032) 0:02:04.104 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.072) 0:02:04.176 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 21 December 2024 11:34:55 -0500 
(0:00:00.034) 0:02:04.210 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.043) 0:02:04.254 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.060) 0:02:04.315 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.030) 0:02:04.346 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.031) 0:02:04.377 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.029) 0:02:04.406 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.030) 0:02:04.436 ***** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.096) 0:02:04.533 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.054) 0:02:04.587 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is 
ostree] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.040) 0:02:04.628 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.036) 0:02:04.664 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.096) 0:02:04.761 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.038) 0:02:04.799 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 21 December 2024 11:34:55 -0500 (0:00:00.040) 0:02:04.840 ***** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 21 December 2024 11:34:56 -0500 (0:00:00.755) 0:02:05.595 ***** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 21 December 2024 11:34:56 -0500 (0:00:00.050) 0:02:05.646 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 21 December 2024 11:34:56 -0500 (0:00:00.035) 0:02:05.681 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 21 December 2024 11:34:56 -0500 (0:00:00.036) 
0:02:05.718 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 21 December 2024 11:34:56 -0500 (0:00:00.034) 0:02:05.753 ***** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 21 December 2024 11:34:56 -0500 (0:00:00.042) 0:02:05.795 ***** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:29:35 EST", "ActiveEnterTimestampMonotonic": "336278957", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus.socket polkit.service dbus-broker.service sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:29:34 EST", "AssertTimestampMonotonic": "336011047", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "643818000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:29:34 EST", "ConditionTimestampMonotonic": "336011044", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ip6tables.service ebtables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4599", 
"ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainHandoffTimestampMonotonic": "336045495", "ExecMainPID": "10892", "ExecMainStartTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainStartTimestampMonotonic": "336014579", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:29:34 EST", "InactiveExitTimestampMonotonic": "336015307", "InvocationID": "99d07a1d51fa44d09a9b8ad11c89f152", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", 
"LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10892", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2486267904", "MemoryCurrent": "33763328", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34119680", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", 
"StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:34:12 EST", "StateChangeTimestampMonotonic": "613658551", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 21 December 2024 11:34:57 -0500 (0:00:00.553) 0:02:06.348 ***** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:29:35 EST", "ActiveEnterTimestampMonotonic": "336278957", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus.socket polkit.service dbus-broker.service sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:29:34 EST", "AssertTimestampMonotonic": "336011047", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "643818000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:29:34 EST", "ConditionTimestampMonotonic": "336011044", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ip6tables.service ebtables.service shutdown.target ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4599", "ControlPID": "0", 
"CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainHandoffTimestampMonotonic": "336045495", "ExecMainPID": "10892", "ExecMainStartTimestamp": "Sat 2024-12-21 11:29:34 EST", "ExecMainStartTimestampMonotonic": "336014579", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:29:34 EST", "InactiveExitTimestampMonotonic": "336015307", "InvocationID": "99d07a1d51fa44d09a9b8ad11c89f152", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": 
"infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10892", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2498940928", "MemoryCurrent": "33763328", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34119680", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", 
"StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:34:12 EST", "StateChangeTimestampMonotonic": "613658551", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 21 December 2024 11:34:57 -0500 (0:00:00.581) 0:02:06.929 ***** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 21 December 2024 11:34:57 -0500 (0:00:00.046) 0:02:06.975 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 21 December 2024 11:34:57 -0500 (0:00:00.031) 0:02:07.007 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 21 December 2024 11:34:57 -0500 (0:00:00.033) 0:02:07.040 ***** ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 21 December 2024 11:34:58 -0500 (0:00:01.049) 0:02:08.090 ***** skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": 
false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.046) 0:02:08.136 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.037) 0:02:08.174 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.049) 0:02:08.223 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.049) 0:02:08.273 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.041) 0:02:08.315 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.052) 0:02:08.367 ***** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.152) 0:02:08.520 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.052) 0:02:08.572 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.053) 0:02:08.625 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.044) 0:02:08.670 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.046) 0:02:08.716 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.142) 0:02:08.858 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.056) 0:02:08.915 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.044) 0:02:08.960 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.061) 0:02:09.021 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel 
linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.041) 0:02:09.063 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:34:59 -0500 (0:00:00.030) 0:02:09.093 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.048) 0:02:09.142 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.048) 0:02:09.190 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.504) 0:02:09.695 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.059) 0:02:09.755 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.068) 0:02:09.823 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.099) 0:02:09.922 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.053) 0:02:09.976 ***** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.084) 0:02:10.061 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 21 December 2024 11:35:00 -0500 (0:00:00.033) 0:02:10.095 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.030) 0:02:10.125 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.426) 0:02:10.551 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.037) 0:02:10.588 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.047) 0:02:10.636 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.082) 0:02:10.719 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.064) 0:02:10.783 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 
Saturday 21 December 2024 11:35:01 -0500 (0:00:00.052) 0:02:10.835 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.041) 0:02:10.876 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 21 December 2024 11:35:01 -0500 (0:00:00.039) 0:02:10.916 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.450) 0:02:11.366 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.044) 0:02:11.411 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.237) 0:02:11.649 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy 
proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.073) 0:02:11.722 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.081) 0:02:11.804 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.072) 0:02:11.876 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.072) 0:02:11.948 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:35:02 -0500 (0:00:00.154) 0:02:12.103 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:35:03 -0500 (0:00:00.069) 0:02:12.173 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:35:03 -0500 (0:00:00.055) 0:02:12.228 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:35:03 -0500 (0:00:00.112) 0:02:12.341 ***** 
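For readability, the __podman_quadlet_str fact captured a few tasks above unescapes to the quadlet unit below. This is the content the role manages at /etc/containers/systemd/quadlet-demo.kube (see __podman_quadlet_file later in the log); it is reconstructed verbatim from the fact, not read from the managed host:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml
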
ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:35:03 -0500 (0:00:00.498) 0:02:12.839 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:35:03 -0500 (0:00:00.100) 0:02:12.940 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:35:03 -0500 (0:00:00.078) 0:02:13.018 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:35:03 -0500 (0:00:00.055) 0:02:13.074 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.064) 0:02:13.138 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.053) 0:02:13.191 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in 
subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.051) 0:02:13.243 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.052) 0:02:13.296 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.044) 0:02:13.340 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.103) 0:02:13.444 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.056) 0:02:13.501 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.049) 0:02:13.550 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.114) 0:02:13.664 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.089) 0:02:13.753 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for 
managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.177) 0:02:13.931 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 21 December 2024 11:35:04 -0500 (0:00:00.114) 0:02:14.045 ***** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:34:13 EST", "ActiveEnterTimestampMonotonic": "614777112", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target basic.target quadlet-demo-mysql.service network-online.target -.mount systemd-journald.socket system.slice quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:34:12 EST", "AssertTimestampMonotonic": "614217806", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "274167000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:34:12 EST", "ConditionTimestampMonotonic": "614217803", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo.service", "ControlGroupId": "11614", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "41566", "ExecMainStartTimestamp": "Sat 2024-12-21 11:34:13 EST", 
"ExecMainStartTimestampMonotonic": "614777069", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2024-12-21 11:34:12 EST] ; stop_time=[n/a] ; pid=41557 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2024-12-21 11:34:12 EST] ; stop_time=[n/a] ; pid=41557 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:34:12 EST", "InactiveExitTimestampMonotonic": "614219906", "InvocationID": "4453f410f120492aaa17025a468db57b", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", 
"LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "41566", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2513186816", "MemoryCurrent": "2961408", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "24440832", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service quadlet-demo-network.service sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:34:13 EST", "StateChangeTimestampMonotonic": "614777112", "StateDirectoryMode": "0755", 
"StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "4", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 21 December 2024 11:35:06 -0500 (0:00:01.392) 0:02:15.438 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798851.5237656, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "ctime": 1734798851.5297656, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 524288881, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1734798851.250763, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.kube", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 456, "uid": 0, "version": "273820366", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 21 December 2024 11:35:06 -0500 (0:00:00.444) 0:02:15.882 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 21 December 2024 11:35:06 -0500 (0:00:00.054) 0:02:15.937 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 21 December 2024 11:35:07 -0500 (0:00:00.362) 0:02:16.300 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 21 December 2024 11:35:07 -0500 (0:00:00.073) 0:02:16.374 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 21 December 2024 11:35:07 -0500 (0:00:00.032) 0:02:16.407 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 21 December 2024 11:35:07 -0500 (0:00:00.034) 0:02:16.442 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.kube", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:07 -0500 (0:00:00.389) 0:02:16.831 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 21 December 2024 11:35:08 -0500 (0:00:00.782) 0:02:17.614 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 21 December 2024 11:35:08 -0500 (0:00:00.054) 0:02:17.668 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:08 -0500 (0:00:00.073) 0:02:17.741 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 21 December 2024 11:35:08 -0500 (0:00:00.050) 0:02:17.792 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.720799", "end": "2024-12-21 11:35:09.749585", "rc": 0, "start": "2024-12-21 11:35:09.028786" } STDOUT: fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
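Taken together, the cleanup tasks above (stop and disable quadlet-demo.service, remove the quadlet file, refresh systemd, prune unused images) correspond roughly to the manual sequence below. This is a sketch of equivalent commands, not the role's actual implementation; in particular, the daemon-reload step is an assumption about what the "Refresh systemd" task does, while the prune command is the one shown verbatim in the task output:

    systemctl stop quadlet-demo.service
    rm /etc/containers/systemd/quadlet-demo.kube
    systemctl daemon-reload
    podman image prune --all -f
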
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 21 December 2024 11:35:09 -0500 (0:00:01.151) 0:02:18.944 *****
included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 21 December 2024 11:35:09 -0500 (0:00:00.092) 0:02:19.037 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 21 December 2024 11:35:09 -0500 (0:00:00.048) 0:02:19.085 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 21 December 2024 11:35:10 -0500 (0:00:00.050) 0:02:19.136 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 21 December 2024 11:35:10 -0500 (0:00:00.046) 0:02:19.182 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032536", "end": "2024-12-21 11:35:10.444852", "rc": 0, "start": "2024-12-21 11:35:10.412316" }

STDOUT:

localhost/podman-pause 5.3.1-1733097600 e20c1c9f26b9 5 minutes ago 701 kB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB
quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 21 December 2024 11:35:10 -0500 (0:00:00.435) 0:02:19.618 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027854", "end": "2024-12-21 11:35:10.857649", "rc": 0, "start": "2024-12-21 11:35:10.829795" }

STDOUT:

local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 21 December 2024 11:35:10 -0500 (0:00:00.432) 0:02:20.051 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034953", "end": "2024-12-21 11:35:11.305132", "rc": 0, "start": "2024-12-21 11:35:11.270179" }

STDOUT:

1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes ce41ff3061ea-service
6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra
fb676e5504a3 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2
61716cd30289 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes a8db008d7cc8-service
2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra
a6061cf8dd2a quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3
203d365a6d08 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 21 December 2024 11:35:11 -0500 (0:00:00.490) 0:02:20.542 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.028154", "end": "2024-12-21 11:35:11.805078", "rc": 0, "start": "2024-12-21 11:35:11.776924" }

STDOUT:

podman
podman-default-kube-network
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 21 December 2024 11:35:11 -0500 (0:00:00.455) 0:02:20.998 *****
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 21 December 2024 11:35:12 -0500 (0:00:00.439) 0:02:21.438 *****
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 21 December 2024 11:35:12 -0500 (0:00:00.418) 0:02:21.857 *****
ok: [managed-node2] => { "ansible_facts": { "services": { "203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service": { "name": "203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service", "source": "systemd", "state": "running", "status": "transient" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name":
"autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": 
"nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": 
"sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": 
"systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:15 -0500 (0:00:02.284) 0:02:24.141 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.033) 0:02:24.175 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.137) 0:02:24.312 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.070) 0:02:24.383 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.057) 0:02:24.441 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": 
"quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.082) 0:02:24.523 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.120) 0:02:24.644 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.066) 0:02:24.711 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.067) 0:02:24.779 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:35:15 -0500 (0:00:00.074) 0:02:24.853 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.427) 0:02:25.280 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.058) 0:02:25.339 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.121) 0:02:25.461 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.060) 0:02:25.521 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.060) 0:02:25.581 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.064) 0:02:25.646 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.059) 0:02:25.705 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.059) 0:02:25.765 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.058) 0:02:25.824 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.095) 0:02:25.920 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.044) 0:02:25.964 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.039) 0:02:26.004 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:35:16 -0500 (0:00:00.088) 0:02:26.092 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:35:17 -0500 (0:00:00.041) 0:02:26.134 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 21 December 2024 11:35:17 -0500 (0:00:00.077) 0:02:26.212 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 21 December 2024 11:35:17 -0500 (0:00:00.034) 0:02:26.246 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 21 December 2024 11:35:17 -0500 (0:00:00.039) 0:02:26.286 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798831.9775786, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "ctime": 1734798829.372554, "dev": 51714, "device_type": 0, 
"executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 469762265, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1734798828.93255, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1605, "uid": 0, "version": "1105963106", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 21 December 2024 11:35:17 -0500 (0:00:00.477) 0:02:26.764 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 21 December 2024 11:35:17 -0500 (0:00:00.073) 0:02:26.837 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 21 December 2024 11:35:18 -0500 (0:00:00.394) 0:02:27.232 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 21 December 2024 11:35:18 -0500 (0:00:00.064) 0:02:27.297 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 21 December 2024 11:35:18 -0500 (0:00:00.078) 0:02:27.375 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 21 December 2024 11:35:18 -0500 (0:00:00.061) 0:02:27.437 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:18 -0500 (0:00:00.512) 0:02:27.949 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 21 December 2024 11:35:19 -0500 (0:00:00.850) 0:02:28.800 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 21 December 2024 11:35:19 -0500 (0:00:00.041) 0:02:28.842 ***** changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:21 -0500 (0:00:01.377) 0:02:30.220 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 21 December 2024 11:35:21 -0500 (0:00:00.077) 0:02:30.297 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028456", "end": "2024-12-21 11:35:21.561652", "rc": 0, "start": "2024-12-21 11:35:21.533196" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 21 December 2024 11:35:21 -0500 (0:00:00.518) 0:02:30.816 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:35:21 -0500 (0:00:00.185) 0:02:31.001 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:35:21 -0500 (0:00:00.065) 0:02:31.067 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 21 December 2024 11:35:22 -0500 (0:00:00.057) 0:02:31.125 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 21 December 2024 11:35:22 -0500 (0:00:00.104) 0:02:31.229 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031575", "end": "2024-12-21 11:35:22.471501", "rc": 0, "start": "2024-12-21 11:35:22.439926" }

STDOUT:

localhost/podman-pause 5.3.1-1733097600 e20c1c9f26b9 5 minutes ago 701 kB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB
quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 21 December 2024 11:35:22 -0500 (0:00:00.427) 0:02:31.656 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028064", "end": "2024-12-21 11:35:22.894516", "rc": 0, "start": "2024-12-21 11:35:22.866452" }

STDOUT:

local systemd-quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 21 December 2024 11:35:22 -0500 (0:00:00.419) 0:02:32.076 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.036478", "end": "2024-12-21 11:35:23.346292", "rc": 0, "start": "2024-12-21 11:35:23.309814" }

STDOUT:

1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes ce41ff3061ea-service
6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra
fb676e5504a3 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2
61716cd30289 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes a8db008d7cc8-service
2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra
a6061cf8dd2a quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3
203d365a6d08 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 21 December 2024 11:35:23 -0500 (0:00:00.475) 0:02:32.552 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027765", "end": "2024-12-21 11:35:23.836260", "rc": 0, "start": "2024-12-21 11:35:23.808495" }

STDOUT:

podman
podman-default-kube-network
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path:
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 21 December 2024 11:35:23 -0500 (0:00:00.476) 0:02:33.028 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 21 December 2024 11:35:24 -0500 (0:00:00.424) 0:02:33.453 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 21 December 2024 11:35:24 -0500 (0:00:00.431) 0:02:33.884 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service": { "name": "203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service", "source": "systemd", "state": "stopped", "status": "failed" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": 
"cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": 
"dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:26 -0500 (0:00:02.033) 0:02:35.918 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:35:26 -0500 (0:00:00.033) 0:02:35.952 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": 
type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:35:26 -0500 (0:00:00.045) 0:02:35.997 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:35:26 -0500 (0:00:00.040) 0:02:36.038 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:26 -0500 (0:00:00.034) 0:02:36.073 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.054) 0:02:36.127 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.095) 0:02:36.223 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.104) 0:02:36.327 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.042) 0:02:36.370 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.051) 0:02:36.422 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.422) 0:02:36.844 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.060) 0:02:36.905 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.060) 0:02:36.965 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.060) 0:02:37.026 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:35:27 -0500 (0:00:00.057) 0:02:37.083 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.059) 0:02:37.143 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.059) 0:02:37.202 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.058) 0:02:37.261 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.059) 0:02:37.320 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.097) 0:02:37.418 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.061) 0:02:37.480 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.056) 0:02:37.537 ***** ok: [managed-node2] => { "ansible_facts": { 
"__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.094) 0:02:37.632 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.047) 0:02:37.680 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.128) 0:02:37.808 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.033) 0:02:37.841 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 21 December 2024 11:35:28 -0500 (0:00:00.040) 0:02:37.882 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798853.0737805, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d681c7d56f912150d041873e880818b22a90c188", "ctime": 1734798824.98251, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 427819222, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1734798824.7105064, "nlink": 1, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2102, "uid": 0, "version": "3455277304", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 21 December 2024 11:35:29 -0500 (0:00:00.407) 0:02:38.289 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 21 December 2024 11:35:29 -0500 (0:00:00.092) 0:02:38.381 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 21 December 2024 11:35:29 -0500 (0:00:00.383) 0:02:38.765 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 21 December 2024 11:35:29 -0500 (0:00:00.051) 0:02:38.817 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 21 December 2024 11:35:29 -0500 (0:00:00.068) 0:02:38.885 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 21 December 2024 11:35:29 -0500 (0:00:00.062) 0:02:38.948 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:30 -0500 (0:00:00.476) 0:02:39.424 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 21 December 2024 11:35:31 -0500 (0:00:00.882) 0:02:40.306 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 21 December 2024 11:35:31 -0500 (0:00:00.093) 0:02:40.400 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:31 -0500 (0:00:00.121) 0:02:40.521 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": 
null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 21 December 2024 11:35:31 -0500 (0:00:00.073) 0:02:40.595 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.030368", "end": "2024-12-21 11:35:31.861329", "rc": 0, "start": "2024-12-21 11:35:31.830961" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 21 December 2024 11:35:31 -0500 (0:00:00.497) 0:02:41.092 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:35:32 -0500 (0:00:00.061) 0:02:41.154 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:35:32 -0500 (0:00:00.043) 0:02:41.197 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:35:32 -0500 (0:00:00.055) 0:02:41.253 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 21 December 2024 11:35:32 -0500 (0:00:00.063) 0:02:41.316 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031571", "end": "2024-12-21 11:35:32.582819", "rc": 0, "start": "2024-12-21 11:35:32.551248" } STDOUT: localhost/podman-pause 5.3.1-1733097600 e20c1c9f26b9 5 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 21 December 2024 11:35:32 -0500 (0:00:00.481) 0:02:41.798 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028891", "end": "2024-12-21 11:35:33.068339", "rc": 0, "start": "2024-12-21 11:35:33.039448" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: 
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 21 December 2024 11:35:33 -0500 (0:00:00.475) 0:02:42.273 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.035527", "end": "2024-12-21 11:35:33.555974", "rc": 0, "start": "2024-12-21 11:35:33.520447" }
STDOUT:
1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes ce41ff3061ea-service
6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra
fb676e5504a3 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2
61716cd30289 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes a8db008d7cc8-service
2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra
a6061cf8dd2a quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3
203d365a6d08 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 21 December 2024 11:35:33 -0500 (0:00:00.484) 0:02:42.757 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.029440", "end": "2024-12-21 11:35:34.014597", "rc": 0, "start": "2024-12-21 11:35:33.985157" }
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 21 December 2024 11:35:34 -0500 (0:00:00.441) 0:02:43.199 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 21 December 2024 11:35:34 -0500 (0:00:00.444) 0:02:43.644 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 21 December 2024 11:35:35 -0500 (0:00:00.479) 0:02:44.123 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service": { "name": "203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service", "source": "systemd", "state": "stopped", "status": "failed" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": 
"NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": 
"alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { 
"name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": 
"netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", 
"source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": 
"systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:38 -0500 (0:00:03.175) 0:02:47.299 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.045) 0:02:47.344 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.163) 0:02:47.507 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.041) 0:02:47.549 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.082) 0:02:47.632 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.058) 0:02:47.691 ***** included: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.108) 0:02:47.800 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.066) 0:02:47.866 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.079) 0:02:47.945 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:35:38 -0500 (0:00:00.092) 0:02:48.038 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.452) 0:02:48.490 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.057) 0:02:48.547 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.045) 0:02:48.593 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.049) 0:02:48.642 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.036) 0:02:48.679 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.032) 0:02:48.711 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.033) 0:02:48.745 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.032) 0:02:48.777 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.032) 0:02:48.810 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.130) 0:02:48.941 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.059) 0:02:49.000 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:35:39 -0500 (0:00:00.061) 0:02:49.062 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:35:40 -0500 (0:00:00.172) 0:02:49.235 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:35:40 -0500 (0:00:00.073) 0:02:49.308 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 21 December 2024 11:35:40 -0500 (0:00:00.132) 0:02:49.440 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 21 December 2024 11:35:40 -0500 (0:00:00.051) 0:02:49.492 ***** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:33:41 EST", "ActiveEnterTimestampMonotonic": "583100178", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket system.slice basic.target sysinit.target quadlet-demo-network.service quadlet-demo-mysql-volume.service tmp.mount network-online.target -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:33:41 EST", "AssertTimestampMonotonic": "582831579", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", 
"CPUShares": "[not set]", "CPUUsageNSec": "2931718000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:33:41 EST", "ConditionTimestampMonotonic": "582831575", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo-mysql.service", "ControlGroupId": "10581", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "38084", "ExecMainStartTimestamp": "Sat 2024-12-21 11:33:41 EST", "ExecMainStartTimestampMonotonic": "583100139", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v 
-f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:33:41 EST", "InactiveExitTimestampMonotonic": "582840410", "InvocationID": "c2e881cb4cd944c08132370172f22d84", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "38084", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2622660608", "MemoryCurrent": "600072192", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "643629056", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount quadlet-demo-network.service sysinit.target quadlet-demo-mysql-volume.service system.slice", "RequiresMountsFor": "/tmp/quadlet_demo /run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:33:41 EST", "StateChangeTimestampMonotonic": "583100178", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "23", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", 
"UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 21 December 2024 11:35:43 -0500 (0:00:02.881) 0:02:52.374 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798820.2164426, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "ctime": 1734798820.2214427, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 272629994, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1734798819.9414387, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 363, "uid": 0, "version": "516003812", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 21 December 2024 11:35:43 -0500 (0:00:00.460) 0:02:52.834 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 21 December 2024 11:35:43 -0500 (0:00:00.115) 0:02:52.950 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 21 December 2024 11:35:44 -0500 (0:00:00.465) 0:02:53.416 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 21 December 2024 11:35:44 -0500 (0:00:00.082) 0:02:53.499 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 21 December 2024 11:35:44 -0500 (0:00:00.042) 0:02:53.541 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet 
file] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 21 December 2024 11:35:44 -0500 (0:00:00.043) 0:02:53.584 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:44 -0500 (0:00:00.423) 0:02:54.008 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 21 December 2024 11:35:45 -0500 (0:00:00.841) 0:02:54.849 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 21 December 2024 11:35:46 -0500 (0:00:00.486) 0:02:55.336 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:46 -0500 (0:00:00.135) 0:02:55.472 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 21 December 2024 11:35:46 -0500 (0:00:00.062) 0:02:55.535 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.255527", "end": "2024-12-21 11:35:47.035441", "rc": 0, "start": "2024-12-21 11:35:46.779914" } STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 21 December 2024 11:35:47 -0500 (0:00:00.677) 0:02:56.212 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:35:47 -0500 (0:00:00.070) 0:02:56.283 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:35:47 -0500 (0:00:00.039) 0:02:56.322 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:35:47 -0500 (0:00:00.035) 0:02:56.358 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 21 December 2024 11:35:47 -0500 (0:00:00.033) 0:02:56.391 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030666", "end": "2024-12-21 11:35:47.621819", "rc": 0, "start": "2024-12-21 11:35:47.591153" } STDOUT: localhost/podman-pause 5.3.1-1733097600 e20c1c9f26b9 5 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 21 December 2024 11:35:47 -0500 (0:00:00.409) 0:02:56.800 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028370", "end": "2024-12-21 11:35:48.031156", "rc": 0, "start": "2024-12-21 11:35:48.002786" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 21 December 2024 11:35:48 -0500 (0:00:00.407) 0:02:57.208 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034588", "end": "2024-12-21 11:35:48.445131", "rc": 0, "start": "2024-12-21 11:35:48.410543" } STDOUT: 1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes ce41ff3061ea-service 6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra fb676e5504a3 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 61716cd30289 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes a8db008d7cc8-service 2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra a6061cf8dd2a quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 21 December 2024 11:35:48 -0500 (0:00:00.416) 0:02:57.625 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027392", "end": "2024-12-21 11:35:48.853606", "rc": 0, "start": "2024-12-21 11:35:48.826214" } STDOUT: podman 
podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 21 December 2024 11:35:48 -0500 (0:00:00.407) 0:02:58.032 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 21 December 2024 11:35:49 -0500 (0:00:00.423) 0:02:58.455 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 21 December 2024 11:35:49 -0500 (0:00:00.458) 0:02:58.914 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": 
"cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": 
"dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": 
"systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": 
"systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:51 -0500 (0:00:02.140) 0:03:01.055 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.056) 0:03:01.111 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.075) 0:03:01.186 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.053) 0:03:01.240 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.042) 0:03:01.282 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.073) 0:03:01.356 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.075) 0:03:01.431 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.046) 0:03:01.478 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.043) 0:03:01.521 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.046) 0:03:01.567 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": 
false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.403) 0:03:01.970 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.056) 0:03:02.027 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:35:52 -0500 (0:00:00.053) 0:03:02.080 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.041) 0:03:02.122 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.039) 0:03:02.162 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.039) 0:03:02.202 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.087) 0:03:02.289 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.041) 0:03:02.331 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.034) 0:03:02.365 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.059) 0:03:02.424 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.041) 0:03:02.466 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.052) 0:03:02.519 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.097) 0:03:02.617 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.050) 0:03:02.668 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.082) 0:03:02.750 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 21 December 2024 11:35:53 -0500 (0:00:00.034) 0:03:02.785 ***** changed: [managed-node2] => { "changed": true, "enabled": 
false, "failed_when_result": false, "name": "quadlet-demo-mysql-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:33:30 EST", "ActiveEnterTimestampMonotonic": "571403680", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target network-online.target system.slice sysinit.target -.mount systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:33:30 EST", "AssertTimestampMonotonic": "571354788", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:33:30 EST", "ConditionTimestampMonotonic": "571354784", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2024-12-21 11:33:30 EST", "ExecMainExitTimestampMonotonic": "571403498", "ExecMainHandoffTimestamp": "Sat 2024-12-21 11:33:30 EST", "ExecMainHandoffTimestampMonotonic": "571365460", "ExecMainPID": "36748", "ExecMainStartTimestamp": "Sat 2024-12-21 11:33:30 EST", "ExecMainStartTimestampMonotonic": "571355719", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", 
"FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:33:30 EST", "InactiveExitTimestampMonotonic": "571356707", "InvocationID": "daa15231dbec4b6f9f22740305eb1cd9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3146358784", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", 
"ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:33:30 EST", "StateChangeTimestampMonotonic": "571403680", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 21 December 2024 11:35:54 -0500 (0:00:00.816) 0:03:03.601 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798808.5702772, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "ctime": 1734798808.5762773, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 662700303, "isblk": false, "ischr": 
false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1734798808.2442727, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 9, "uid": 0, "version": "2318144409", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 21 December 2024 11:35:54 -0500 (0:00:00.384) 0:03:03.986 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 21 December 2024 11:35:54 -0500 (0:00:00.061) 0:03:04.048 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 21 December 2024 11:35:55 -0500 (0:00:00.372) 0:03:04.420 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 21 December 2024 11:35:55 -0500 (0:00:00.102) 0:03:04.522 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 21 December 2024 11:35:55 -0500 (0:00:00.036) 0:03:04.559 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 21 December 2024 11:35:55 -0500 (0:00:00.036) 0:03:04.596 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 21 December 2024 11:35:55 -0500 (0:00:00.394) 0:03:04.991 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 
21 December 2024 11:35:56 -0500 (0:00:00.752) 0:03:05.743 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.431) 0:03:06.175 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.046) 0:03:06.222 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.035) 0:03:06.257 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028431", "end": "2024-12-21 11:35:57.492176", "rc": 0, "start": "2024-12-21 11:35:57.463745" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.433) 0:03:06.690 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.122) 0:03:06.812 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.073) 0:03:06.886 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.060) 0:03:06.947 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 21 December 2024 11:35:57 -0500 (0:00:00.055) 0:03:07.003 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", 
"-n" ], "delta": "0:00:00.032430", "end": "2024-12-21 11:35:58.281684", "rc": 0, "start": "2024-12-21 11:35:58.249254" } STDOUT: localhost/podman-pause 5.3.1-1733097600 e20c1c9f26b9 6 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 21 December 2024 11:35:58 -0500 (0:00:00.460) 0:03:07.463 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028327", "end": "2024-12-21 11:35:58.696429", "rc": 0, "start": "2024-12-21 11:35:58.668102" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 21 December 2024 11:35:58 -0500 (0:00:00.432) 0:03:07.896 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.035177", "end": "2024-12-21 11:35:59.170421", "rc": 0, "start": "2024-12-21 11:35:59.135244" } STDOUT: 1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes ce41ff3061ea-service 6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra fb676e5504a3 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 61716cd30289 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes a8db008d7cc8-service 2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra a6061cf8dd2a quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 21 December 2024 11:35:59 -0500 (0:00:00.479) 0:03:08.376 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027850", "end": "2024-12-21 11:35:59.637064", "rc": 0, "start": "2024-12-21 11:35:59.609214" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 21 December 2024 11:35:59 -0500 (0:00:00.488) 0:03:08.865 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 21 December 2024 11:36:00 -0500 (0:00:00.429) 0:03:09.294 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 21 December 
2024 11:36:00 -0500 (0:00:00.454) 0:03:09.749 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": 
"active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": 
"active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, 
"modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { 
"name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": 
"stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": 
"stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:36:02 -0500 (0:00:02.096) 0:03:11.845 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 21 December 2024 11:36:02 -0500 (0:00:00.041) 0:03:11.887 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 21 December 2024 11:36:02 -0500 (0:00:00.076) 0:03:11.964 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 21 December 2024 11:36:02 -0500 (0:00:00.068) 0:03:12.033 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 21 December 2024 11:36:02 -0500 (0:00:00.057) 0:03:12.090 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.066) 0:03:12.156 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.099) 0:03:12.255 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.041) 0:03:12.297 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.040) 0:03:12.337 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.046) 0:03:12.383 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798569.6419525, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1734798549.6808388, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3128486407", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.438) 0:03:12.822 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.039) 0:03:12.862 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.094) 0:03:12.956 ***** skipping: [managed-node2] => { "changed": 
false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.035) 0:03:12.991 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.034) 0:03:13.025 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 21 December 2024 11:36:03 -0500 (0:00:00.038) 0:03:13.064 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.055) 0:03:13.120 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.048) 0:03:13.169 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.060) 0:03:13.229 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.091) 0:03:13.321 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 21 December 2024 11:36:04 -0500 
(0:00:00.043) 0:03:13.365 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.041) 0:03:13.406 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.077) 0:03:13.483 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.047) 0:03:13.531 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.086) 0:03:13.617 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 21 December 2024 11:36:04 -0500 (0:00:00.038) 0:03:13.655 ***** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-network.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-21 11:33:24 EST", "ActiveEnterTimestampMonotonic": "566173724", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "network-online.target sysinit.target -.mount systemd-journald.socket basic.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-21 11:33:24 EST", "AssertTimestampMonotonic": "566130715", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin 
cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-21 11:33:24 EST", "ConditionTimestampMonotonic": "566130712", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2024-12-21 11:33:24 EST", "ExecMainExitTimestampMonotonic": "566173510", "ExecMainHandoffTimestamp": "Sat 2024-12-21 11:33:24 EST", "ExecMainHandoffTimestampMonotonic": "566140601", "ExecMainPID": "35919", "ExecMainStartTimestamp": "Sat 2024-12-21 11:33:24 EST", "ExecMainStartTimestampMonotonic": "566131611", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-21 11:33:24 EST", "InactiveExitTimestampMonotonic": "566132061", "InvocationID": "a956234a38c3457c881b5304660e7e06", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": 
"infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3112439808", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", 
"SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-21 11:33:24 EST", "StateChangeTimestampMonotonic": "566173724", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 21 December 2024 11:36:05 -0500 (0:00:00.838) 0:03:14.494 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1734798803.4432044, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "ctime": 1734798803.4502046, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 616562899, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1734798802.9521976, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 74, "uid": 0, "version": "4286853892", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 21 December 2024 11:36:05 -0500 (0:00:00.476) 0:03:14.971 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 21 December 2024 11:36:05 -0500 (0:00:00.084) 0:03:15.055 ***** ok: [managed-node2] => { 
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 21 December 2024 11:36:06 -0500 (0:00:00.392) 0:03:15.448 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 21 December 2024 11:36:06 -0500 (0:00:00.091) 0:03:15.539 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 21 December 2024 11:36:06 -0500 (0:00:00.052) 0:03:15.592 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 21 December 2024 11:36:06 -0500 (0:00:00.040) 0:03:15.633 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 21 December 2024 11:36:06 -0500 (0:00:00.384) 0:03:16.018 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 21 December 2024 11:36:07 -0500 (0:00:00.748) 0:03:16.766 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.426) 0:03:17.193 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.046) 0:03:17.240 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: 
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.033) 0:03:17.273 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028845", "end": "2024-12-21 11:36:08.501184", "rc": 0, "start": "2024-12-21 11:36:08.472339" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.428) 0:03:17.702 ***** included: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.106) 0:03:17.808 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.056) 0:03:17.865 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.126) 0:03:17.992 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 21 December 2024 11:36:08 -0500 (0:00:00.057) 0:03:18.049 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031997", "end": "2024-12-21 11:36:09.306154", "rc": 0, "start": "2024-12-21 11:36:09.274157" } STDOUT: localhost/podman-pause 5.3.1-1733097600 e20c1c9f26b9 6 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 21 December 2024 11:36:09 -0500 (0:00:00.436) 0:03:18.485 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027080", "end": "2024-12-21 11:36:09.741010", "rc": 0, "start": "2024-12-21 11:36:09.713930" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 21 December 2024 11:36:09 -0500 (0:00:00.463) 0:03:18.948 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": 
"0:00:00.034528", "end": "2024-12-21 11:36:10.217545", "rc": 0, "start": "2024-12-21 11:36:10.183017" } STDOUT: 1e5d0ec512aa localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes ce41ff3061ea-service 6232f80e7e24 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp d2f6641bb2ef-infra fb676e5504a3 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 61716cd30289 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes a8db008d7cc8-service 2907e4388cf2 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 26cc0fa7c809-infra a6061cf8dd2a quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 21 December 2024 11:36:10 -0500 (0:00:00.471) 0:03:19.419 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.028793", "end": "2024-12-21 11:36:10.675975", "rc": 0, "start": "2024-12-21 11:36:10.647182" } STDOUT: podman podman-default-kube-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 21 December 2024 11:36:10 -0500 (0:00:00.436) 0:03:19.856 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 21 December 2024 11:36:11 -0500 (0:00:00.410) 0:03:20.267 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 21 December 2024 11:36:11 -0500 (0:00:00.442) 0:03:20.709 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { 
"name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": 
{ "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" 
}, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": 
"systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { 
"name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 21 December 2024 11:36:13 -0500 (0:00:02.130) 0:03:22.840 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 21 December 2024 11:36:13 -0500 
(0:00:00.058) 0:03:22.898 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 21 December 2024 11:36:13 -0500 (0:00:00.055) 0:03:22.954 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 21 December 2024 11:36:13 -0500 (0:00:00.044) 0:03:22.999 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188 Saturday 21 December 2024 11:36:13 -0500 (0:00:00.058) 0:03:23.057 ***** fatal: [managed-node2]: FAILED! => { "assertion": "__podman_test_debug_images.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Debug] ******************************************************************* task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Saturday 21 December 2024 11:36:13 -0500 (0:00:00.042) 0:03:23.100 ***** ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.377867", "end": "2024-12-21 11:36:14.672227", "rc": 0, "start": "2024-12-21 11:36:14.294360" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet + : + systemctl list-unit-files --all + grep quadlet + : + systemctl list-units --plain --failed -l --all + grep quadlet + : TASK [Get journald] ************************************************************ task path: /tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Saturday 21 December 2024 11:36:14 -0500 (0:00:00.750) 0:03:23.850 ***** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030251", "end": "2024-12-21 11:36:15.087626", "failed_when_result": true, "rc": 0, "start": "2024-12-21 11:36:15.057375" } STDOUT: Dec 21 11:31:39 managed-node2 systemd[22955]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target timers.target - Timers. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Dec 21 11:31:39 managed-node2 systemd[22955]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Dec 21 11:31:39 managed-node2 systemd[22955]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Dec 21 11:31:39 managed-node2 systemd[22955]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Dec 21 11:31:39 managed-node2 systemd[22955]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Dec 21 11:31:39 managed-node2 systemd[22955]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Dec 21 11:31:39 managed-node2 systemd[22955]: Startup finished in 71ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 71781 microseconds. Dec 21 11:31:39 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1906. 
Dec 21 11:31:40 managed-node2 python3.12[23101]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:40 managed-node2 python3.12[23232]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:41 managed-node2 sudo[23405]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mlxgabbcqgabjikhfvvgbxwcejnqigki ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798700.790551-14193-4218201843360/AnsiballZ_podman_image.py' Dec 21 11:31:41 managed-node2 sudo[23405]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23405) opened. Dec 21 11:31:41 managed-node2 sudo[23405]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:41 managed-node2 systemd[22955]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Dec 21 11:31:41 managed-node2 systemd[22955]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Dec 21 11:31:41 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 21 11:31:41 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 21 11:31:41 managed-node2 systemd[22955]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Dec 21 11:31:41 managed-node2 dbus-broker-launch[23428]: Ready Dec 21 11:31:41 managed-node2 systemd[22955]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Dec 21 11:31:41 managed-node2 systemd[22955]: Started podman-23415.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Dec 21 11:31:41 managed-node2 systemd[22955]: Started podman-pause-2085f4d5.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Dec 21 11:31:41 managed-node2 systemd[22955]: Started podman-23431.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Dec 21 11:31:42 managed-node2 systemd[22955]: Started podman-23456.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Dec 21 11:31:42 managed-node2 sudo[23405]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:43 managed-node2 python3.12[23594]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:31:43 managed-node2 python3.12[23725]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:44 managed-node2 python3.12[23856]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:31:44 managed-node2 python3.12[23961]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798703.8676434-14342-119477966397925/.source.yml _original_basename=.hf_9mcra follow=False checksum=feb9b844cfd9411d5e266c9ae51c9bc39858ae7b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:31:44 managed-node2 sudo[24134]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nbzglxotblskbezkxdxfzroesmpbdbeu ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798704.6384764-14383-112561728099746/AnsiballZ_podman_play.py' Dec 21 11:31:44 managed-node2 sudo[24134]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24134) opened. 
Dec 21 11:31:44 managed-node2 sudo[24134]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:31:45 managed-node2 systemd[22955]: Started podman-24145.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Dec 21 11:31:45 managed-node2 systemd[22955]: Created slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice - cgroup user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Dec 21 11:31:45 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Dec 21 11:31:45 managed-node2 systemd[22955]: Started rootless-netns-08629e8e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Dec 21 11:31:45 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:45 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:31:45 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:45 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:31:45 managed-node2 systemd[22955]: Started run-r539aaca40e7043889c4c288547e1a016.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. 
Dec 21 11:31:45 managed-node2 aardvark-dns[24228]: starting aardvark on a child with pid 24229 Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Successfully parsed config Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Listen v6 ip {} Dec 21 11:31:45 managed-node2 aardvark-dns[24229]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Dec 21 11:31:45 managed-node2 conmon[24244]: conmon db0839eafbe8b5b2b368 : failed to write to /proc/self/oom_score_adj: Permission denied Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-conmon-db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : terminal_ctrl_fd: 14 Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : winsz read side: 17, winsz write side: 18 Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. Dec 21 11:31:45 managed-node2 conmon[24245]: conmon db0839eafbe8b5b2b368 : container PID: 24247 Dec 21 11:31:45 managed-node2 conmon[24249]: conmon a6748a195df11a7b054e : failed to write to /proc/self/oom_score_adj: Permission denied Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 61. Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : terminal_ctrl_fd: 13 Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : winsz read side: 16, winsz write side: 17 Dec 21 11:31:45 managed-node2 systemd[22955]: Started libpod-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 66. 
Dec 21 11:31:45 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : container PID: 24252 Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f Container: a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-12-21T11:31:45-05:00" level=info msg="/bin/podman filtering at log level debug" time="2024-12-21T11:31:45-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-12-21T11:31:45-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-12-21T11:31:45-05:00" level=info msg="Using sqlite as database backend" time="2024-12-21T11:31:45-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2024-12-21T11:31:45-05:00" level=debug msg="Using graph driver overlay" time="2024-12-21T11:31:45-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2024-12-21T11:31:45-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2024-12-21T11:31:45-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2024-12-21T11:31:45-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2024-12-21T11:31:45-05:00" level=debug msg="Using transient store: false" time="2024-12-21T11:31:45-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2024-12-21T11:31:45-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2024-12-21T11:31:45-05:00" level=debug msg="Initializing event backend file" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: 
invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-21T11:31:45-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-21T11:31:45-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-21T11:31:45-05:00" level=debug msg="Successfully loaded 1 networks" time="2024-12-21T11:31:45-05:00" level=debug msg="found free device name podman1" time="2024-12-21T11:31:45-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="FROM \"scratch\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Check for idmapped mounts support " time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c234,c998\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container ID: 62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3" time="2024-12-21T11:31:45-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2024-12-21T11:31:45-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2024-12-21T11:31:45-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3\"" time="2024-12-21T11:31:45-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2024-12-21T11:31:45-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2024-12-21T11:31:45-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3\"" time="2024-12-21T11:31:45-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-21T11:31:45-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-21T11:31:45-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2024-12-21T11:31:45-05:00" level=debug msg="layer list: [\"c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701\"]" time="2024-12-21T11:31:45-05:00" level=debug msg="using \"/var/tmp/buildah502132700\" to hold temporary data" time="2024-12-21T11:31:45-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701/diff" time="2024-12-21T11:31:45-05:00" level=debug msg="layer \"c5eac758b864e56a456f3167d9879006a4d1585c6fdda99948f4097d640d7701\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2024-12-21T11:31:45-05:00" level=debug msg="OCIv1 config = {\"created\":\"2024-12-21T16:31:45.305846253Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-21T16:31:45.27727152Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-21T16:31:45.308897263Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-21T11:31:45-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\",\"size\":684},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2024-12-21T11:31:45-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2024-12-21T16:31:45.305846253Z\",\"container\":\"62b696e6e009827713e5104e7c602f2963c9d871df9a93b1f76952070a49c3d3\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-21T16:31:45.27727152Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-21T16:31:45.308897263Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-21T11:31:45-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1347,\"digest\":\"sha256:04b3243342b7e6e0adbf1081e9d3ced898cdc056758e229975a4bbaa2278f30f\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2024-12-21T11:31:45-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2024-12-21T11:31:45-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2024-12-21T11:31:45-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2024-12-21T11:31:45-05:00" level=debug msg=" Requirement 0: allowed" time="2024-12-21T11:31:45-05:00" level=debug msg="Overall: allowed" time="2024-12-21T11:31:45-05:00" level=debug msg="start reading config" time="2024-12-21T11:31:45-05:00" level=debug msg="finished reading config" time="2024-12-21T11:31:45-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2024-12-21T11:31:45-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2024-12-21T11:31:45-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2024-12-21T11:31:45-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-21T11:31:45-05:00" level=debug msg="No compression detected" time="2024-12-21T11:31:45-05:00" level=debug msg="Using original blob without modification" time="2024-12-21T11:31:45-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2024-12-21T11:31:45-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-21T11:31:45-05:00" level=debug msg="No compression detected" time="2024-12-21T11:31:45-05:00" level=debug msg="Compression change for blob sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2024-12-21T11:31:45-05:00" level=debug msg="Using original blob without modification" time="2024-12-21T11:31:45-05:00" level=debug msg="setting image creation date to 2024-12-21 16:31:45.305846253 +0000 UTC" time="2024-12-21T11:31:45-05:00" level=debug msg="created new image ID \"c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\" with metadata \"{}\"" time="2024-12-21T11:31:45-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-21T11:31:45-05:00" level=debug msg="printing final image id \"c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice for parent user.slice and name libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7" time="2024-12-21T11:31:45-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:31:45-05:00" level=debug msg="setting container name a1abeaf778ea-infra" time="2024-12-21T11:31:45-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network eadc88e6c881bd13256e5545b5f705fde2984752335660ec32604c485b4d82ec bridge podman1 2024-12-21 11:31:45.134348204 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-21T11:31:45-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-21T11:31:45-05:00" level=debug msg="Allocated lock 1 for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:c3f09c67e7a58bd4757acf02fed5a1339d63b8f733699e9ec3924633a599d4d7\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\" has run directory \"/run/user/3001/containers/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:31:45-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:31:45-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:31:45-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:31:45-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:31:45-05:00" level=debug msg="adding container to pod httpd1" time="2024-12-21T11:31:45-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2024-12-21T11:31:45-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:31:45-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /proc" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /dev" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /sys" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-21T11:31:45-05:00" level=debug msg="Allocated lock 2 for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\" has run directory \"/run/user/3001/containers/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Strongconnecting node db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="Pushed db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd onto stack" time="2024-12-21T11:31:45-05:00" level=debug msg="Finishing node db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd. Popped db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd off stack" time="2024-12-21T11:31:45-05:00" level=debug msg="Strongconnecting node a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="Pushed a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 onto stack" time="2024-12-21T11:31:45-05:00" level=debug msg="Finishing node a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18. 
Popped a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 off stack" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/CMI66WDV4TLVHMI2F4V54VJ5MC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c541,c840\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-9c865f86-001f-67d3-ac39-4d04ef79a213 for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="Mounted container \"db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/merged\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created root filesystem for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd at /home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/merged" time="2024-12-21T11:31:45-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2024-12-21T11:31:45-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2024-12-21T11:31:45-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_eadc88e6_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "52:c1:93:d2:4b:a2", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Starting parent driver\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport1625359831/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport1625359831/.bp.sock]\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport: time=\"2024-12-21T11:31:45-05:00\" level=info msg=Ready\n" time="2024-12-21T11:31:45-05:00" level=debug msg="rootlessport is ready" time="2024-12-21T11:31:45-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:31:45-05:00" level=debug msg="Setting Cgroups for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd to 
user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice:libpod:db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:31:45-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/e1d213aeb5ded1ac291e18fa5ef816218bc192a61950348e6c5bcc38d826d567/merged\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created OCI spec for container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/config.json" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice for parent user.slice and name libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:31:45-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd -u db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata -p /run/user/3001/containers/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/pidfile -n a1abeaf778ea-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file 
--exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd]" time="2024-12-21T11:31:45-05:00" level=info msg="Running conmon under slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice and unitName libpod-conmon-db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-21T11:31:45-05:00" level=debug msg="Received: 24247" time="2024-12-21T11:31:45-05:00" level=info msg="Got Conmon PID as 24245" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd in OCI runtime" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-21T11:31:45-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-21T11:31:45-05:00" level=debug msg="Starting container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd with command [/catatonit -P]" time="2024-12-21T11:31:45-05:00" level=debug msg="Started container db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd" time="2024-12-21T11:31:45-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/GEOVXMGCF5JCURGOOQCQYSNIJA,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c541,c840\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Mounted container \"a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/merged\"" time="2024-12-21T11:31:45-05:00" level=debug msg="Created root filesystem for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 at /home/podman_basic_user/.local/share/containers/storage/overlay/b48d9d14f9126819028a1cfc335f0e84ac40a302b8ba8ccfeb0beebbbfd41d40/merged" time="2024-12-21T11:31:45-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:31:45-05:00" level=debug msg="Setting Cgroups for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 to user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice:libpod:a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:31:45-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-21T11:31:45-05:00" level=debug msg="Created OCI spec for container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/config.json" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup path 
user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice for parent user.slice and name libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f" time="2024-12-21T11:31:45-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice" time="2024-12-21T11:31:45-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:31:45-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 -u a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata -p /run/user/3001/containers/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18]" time="2024-12-21T11:31:45-05:00" level=info msg="Running conmon under slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice and unitName libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-21T11:31:45-05:00" level=debug msg="Received: 24252" time="2024-12-21T11:31:45-05:00" level=info msg="Got Conmon PID as 24250" time="2024-12-21T11:31:45-05:00" level=debug msg="Created container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 in OCI runtime" time="2024-12-21T11:31:45-05:00" level=debug 
msg="Starting container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18 with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-21T11:31:45-05:00" level=debug msg="Started container a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18" time="2024-12-21T11:31:45-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-12-21T11:31:45-05:00" level=debug msg="Shutting down engines" time="2024-12-21T11:31:45-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24145 Dec 21 11:31:45 managed-node2 python3.12[24137]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 21 11:31:45 managed-node2 sudo[24134]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:46 managed-node2 sudo[24426]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wivgzbfahdqhicbbuvtpitoziijccoml ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798706.2267947-14448-8737191580808/AnsiballZ_systemd.py' Dec 21 11:31:46 managed-node2 sudo[24426]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24426) opened. Dec 21 11:31:46 managed-node2 sudo[24426]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:46 managed-node2 python3.12[24429]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:31:46 managed-node2 systemd[22955]: Reload requested from client PID 24430 ('systemctl')... Dec 21 11:31:46 managed-node2 systemd[22955]: Reloading... Dec 21 11:31:46 managed-node2 systemd[22955]: Reloading finished in 44 ms. Dec 21 11:31:46 managed-node2 sudo[24426]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:47 managed-node2 sudo[24613]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hdqbqkzbciigachoxfgninvtwlivcuml ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798706.9334629-14490-42851388239476/AnsiballZ_systemd.py' Dec 21 11:31:47 managed-node2 sudo[24613]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24613) opened. Dec 21 11:31:47 managed-node2 sudo[24613]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:47 managed-node2 python3.12[24616]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 21 11:31:47 managed-node2 systemd[22955]: Reload requested from client PID 24619 ('systemctl')... Dec 21 11:31:47 managed-node2 systemd[22955]: Reloading... Dec 21 11:31:47 managed-node2 systemd[22955]: Reloading finished in 44 ms. 
Dec 21 11:31:47 managed-node2 sudo[24613]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:47 managed-node2 sudo[24801]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xvacrifcbgtqxqivedbyonzgyscppauw ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798707.6772003-14530-270047551956578/AnsiballZ_systemd.py' Dec 21 11:31:47 managed-node2 sudo[24801]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24801) opened. Dec 21 11:31:47 managed-node2 sudo[24801]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:31:48 managed-node2 python3.12[24804]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:31:48 managed-node2 systemd[22955]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 83. Dec 21 11:31:48 managed-node2 systemd[22955]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Received SIGHUP Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Successfully parsed config Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Listen v4 ip {} Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: Listen v6 ip {} Dec 21 11:31:48 managed-node2 aardvark-dns[24229]: No configuration found stopping the sever Dec 21 11:31:48 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:48 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Dec 21 11:31:48 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Dec 21 11:31:48 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd)" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=info msg="Using sqlite as database backend" Dec 21 
11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="systemd-logind: Unknown object '/'." Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using transient store: false" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Cached value indicated that native-diff is usable" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Initializing event backend file" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug 
msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=info msg="Setting parallel job count to 7" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only db0839eafbe8b5b2b368853e55b0de10d729bc6837a5ba24c9f732a7316548bd)" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=debug msg="Shutting down engines" Dec 21 11:31:48 managed-node2 /usr/bin/podman[24818]: time="2024-12-21T11:31:48-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=24818 Dec 21 11:31:58 managed-node2 podman[24807]: time="2024-12-21T11:31:58-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Dec 21 11:31:58 managed-node2 conmon[24250]: conmon a6748a195df11a7b054e : container 24252 exited with status 137 Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18)" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=info msg="Using sqlite as database backend" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="systemd-logind: Unknown object '/'." Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Using transient store: false" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="Cached value indicated that native-diff is usable" Dec 21 11:31:58 managed-node2 systemd[22955]: Stopping 
libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 85. Dec 21 11:31:58 managed-node2 /usr/bin/podman[24838]: time="2024-12-21T11:31:58-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Dec 21 11:31:58 managed-node2 systemd[22955]: Stopped libpod-conmon-a6748a195df11a7b054e168ee23053bb9d633b4dce892cc9d5e84cab54e94e18.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Dec 21 11:31:58 managed-node2 systemd[22955]: Removed slice user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice - cgroup user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Dec 21 11:31:58 managed-node2 systemd[22955]: user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f.slice: No such file or directory Dec 21 11:31:58 managed-node2 podman[24807]: Pods stopped: Dec 21 11:31:58 managed-node2 podman[24807]: a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f Dec 21 11:31:58 managed-node2 podman[24807]: Pods removed: Dec 21 11:31:58 managed-node2 podman[24807]: a1abeaf778ea006deaf571cb928ce50e3388898a893abd554306706b9682a10f Dec 21 11:31:58 managed-node2 podman[24807]: Secrets removed: Dec 21 11:31:58 managed-node2 podman[24807]: Volumes removed: Dec 21 11:31:58 managed-node2 systemd[22955]: Created slice user-libpod_pod_9556bd485cb17e948b14fa8acbafacdc25b3d8209d11a2eebe718d274f2a4356.slice - cgroup user-libpod_pod_9556bd485cb17e948b14fa8acbafacdc25b3d8209d11a2eebe718d274f2a4356.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Dec 21 11:31:58 managed-node2 systemd[22955]: Started libpod-c089df96847b506f85090c1b8671966e84959519256edf7dab9438abd05766c2.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Dec 21 11:31:58 managed-node2 systemd[22955]: Started rootless-netns-0a95b04c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. 
Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:31:58 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:31:58 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:31:58 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:31:58 managed-node2 systemd[22955]: Started run-r22328a1bdabc44c488dffe68a598f778.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Dec 21 11:31:58 managed-node2 systemd[22955]: Started libpod-adb495b2f075e8901bb800ade0cacdb24aa4b5962c9cfc594d9f7d30984191e3.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Dec 21 11:31:58 managed-node2 systemd[22955]: Started libpod-c599af12ec5a1d1a2998f0d3cb1aac751d459d1136207e789c19ad71328049a4.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Dec 21 11:31:58 managed-node2 podman[24807]: Pod: Dec 21 11:31:58 managed-node2 podman[24807]: 9556bd485cb17e948b14fa8acbafacdc25b3d8209d11a2eebe718d274f2a4356 Dec 21 11:31:58 managed-node2 podman[24807]: Container: Dec 21 11:31:58 managed-node2 podman[24807]: c599af12ec5a1d1a2998f0d3cb1aac751d459d1136207e789c19ad71328049a4 Dec 21 11:31:58 managed-node2 systemd[22955]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
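The service that just finished starting replays the kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml. That file is not printed in the log; judging from the container name httpd1-httpd1, the /bin/busybox-extras httpd -f -p 80 command and the /var/www workdir seen in the debug output above, it is roughly of this shape (the image is inferred from the httpd2 pod later in the log, and host port and volume details are omitted because they are not visible here):

    apiVersion: v1
    kind: Pod
    metadata:
      name: httpd1
    spec:
      containers:
        - name: httpd1
          image: quay.io/libpod/testimage:20210610   # assumption; not shown for httpd1
          command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]
          workingDir: /var/www
          ports:
            - containerPort: 80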
Dec 21 11:31:58 managed-node2 sudo[24801]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:31:59 managed-node2 python3.12[25026]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 21 11:32:00 managed-node2 python3.12[25158]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:01 managed-node2 python3.12[25291]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:03 managed-node2 python3.12[25423]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:03 managed-node2 python3.12[25554]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:04 managed-node2 systemd[4479]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Dec 21 11:32:04 managed-node2 systemd[4479]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Dec 21 11:32:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:04 managed-node2 systemd[4479]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Dec 21 11:32:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
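The systemd-escape call logged above (11:32:01) is how the unit name for a given kube file is derived: the file path becomes the template instance, with dashes escaped as \x2d. A sketch of an equivalent task that registers the escaped name for later use (the task and variable names are illustrative):

    - name: Compute the podman-kube unit name for httpd2.yml
      ansible.builtin.command:
        argv:
          - systemd-escape
          - --template
          - podman-kube@.service
          - /etc/containers/ansible-kubernetes.d/httpd2.yml
      register: __kube_unit_name   # .stdout holds the escaped unit name
      changed_when: false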
Dec 21 11:32:07 managed-node2 podman[25718]: 2024-12-21 11:32:07.27244711 -0500 EST m=+2.752253327 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:07 managed-node2 python3.12[25864]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:08 managed-node2 python3.12[25995]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:08 managed-node2 python3.12[26126]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:32:09 managed-node2 python3.12[26231]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798728.406394-15466-99731724956341/.source.yml _original_basename=.zqj4emsk follow=False checksum=22d57ee085f96ddbcb2a5dc8bca5b7a52aeee580 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:09 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:32:09 managed-node2 systemd[1]: Created slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice - cgroup machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice. ░░ Subject: A start job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished successfully. ░░ ░░ The job identifier is 1990. 
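The containers.podman.podman_play invocation shown above corresponds to a task of roughly this shape; kube_file, state, debug and log_level are the only non-default arguments visible in the logged module parameters, so everything else is left at its default here:

    - name: Start the httpd2 kube spec as root
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: started
        debug: true
        log_level: debug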
Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.671403128 -0500 EST m=+0.080443089 container create fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.677566753 -0500 EST m=+0.086606799 pod create a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.707533382 -0500 EST m=+0.116573407 container create 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.680552757 -0500 EST m=+0.089592781 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7286] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:09 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:32:09 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:09 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7425] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7477] device (veth0): carrier: link connected Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.7483] device (podman1): carrier: link connected Dec 21 11:32:09 managed-node2 (udev-worker)[26385]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:09 managed-node2 (udev-worker)[26386]: Network interface NamePolicy= disabled on kernel command line. 
Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8035] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8040] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8047] device (podman1): Activation: starting connection 'podman1' (ecb2a905-7bae-48ad-b953-99d577d9776b) Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8048] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8095] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8097] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8099] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1997. Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8451] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8455] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 21 11:32:09 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1997. Dec 21 11:32:09 managed-node2 NetworkManager[775]: [1734798729.8459] device (podman1): Activation: successful, device activated. Dec 21 11:32:09 managed-node2 systemd[1]: Started run-r735556f5d4974f57a4b69c88f3c80d45.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r735556f5d4974f57a4b69c88f3c80d45.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r735556f5d4974f57a4b69c88f3c80d45.scope has finished successfully. ░░ ░░ The job identifier is 2076. 
Dec 21 11:32:09 managed-node2 aardvark-dns[26411]: starting aardvark on a child with pid 26421 Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Successfully parsed config Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Listen v6 ip {} Dec 21 11:32:09 managed-node2 aardvark-dns[26421]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope. ░░ Subject: A start job for unit libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully. ░░ ░░ The job identifier is 2082. Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : terminal_ctrl_fd: 13 Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : winsz read side: 17, winsz write side: 18 Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope - libcrun container. ░░ Subject: A start job for unit libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has finished successfully. ░░ ░░ The job identifier is 2089. Dec 21 11:32:09 managed-node2 conmon[26426]: conmon fd6f390a6db243ce508b : container PID: 26428 Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.943028043 -0500 EST m=+0.352068161 container init fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:09 managed-node2 podman[26369]: 2024-12-21 11:32:09.946398608 -0500 EST m=+0.355438546 container start fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope. ░░ Subject: A start job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully. ░░ ░░ The job identifier is 2096. 
Dec 21 11:32:09 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Dec 21 11:32:09 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : terminal_ctrl_fd: 12 Dec 21 11:32:09 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : winsz read side: 16, winsz write side: 17 Dec 21 11:32:09 managed-node2 systemd[1]: Started libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope - libcrun container. ░░ Subject: A start job for unit libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished successfully. ░░ ░░ The job identifier is 2103. Dec 21 11:32:10 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : container PID: 26433 Dec 21 11:32:10 managed-node2 podman[26369]: 2024-12-21 11:32:10.010181831 -0500 EST m=+0.419221899 container init 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 21 11:32:10 managed-node2 podman[26369]: 2024-12-21 11:32:10.013437493 -0500 EST m=+0.422477550 container start 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:10 managed-node2 podman[26369]: 2024-12-21 11:32:10.020079373 -0500 EST m=+0.429119342 pod start a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 Container: 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-12-21T11:32:09-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2024-12-21T11:32:09-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-21T11:32:09-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-12-21T11:32:09-05:00" level=info msg="Using sqlite as database backend" time="2024-12-21T11:32:09-05:00" level=debug msg="Using graph driver overlay" time="2024-12-21T11:32:09-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Using run root /run/containers/storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2024-12-21T11:32:09-05:00" level=debug msg="Using tmp dir 
/run/libpod" time="2024-12-21T11:32:09-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2024-12-21T11:32:09-05:00" level=debug msg="Using transient store: false" time="2024-12-21T11:32:09-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2024-12-21T11:32:09-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2024-12-21T11:32:09-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2024-12-21T11:32:09-05:00" level=debug msg="Initializing event backend journald" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-21T11:32:09-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-21T11:32:09-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-21T11:32:09-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 77beb6ebd60e75ff206e6ae63f4f50d6164d432e696e9e72f903a31005b7589f bridge podman1 2024-12-21 11:29:51.422756656 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-21T11:32:09-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug 
msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice for parent machine.slice and name libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d" time="2024-12-21T11:32:09-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:32:09-05:00" level=debug msg="setting container name a47ffa0659b6-infra" time="2024-12-21T11:32:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Allocated lock 1 for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:e20c1c9f26b9ea9ec461cf486e641689385d0ba8255636809818a6a36925e38d\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2024-12-21T11:32:09-05:00" level=debug msg="Created container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\" has work directory \"/var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\" has run directory \"/run/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-21T11:32:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-21T11:32:09-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-21T11:32:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-21T11:32:09-05:00" level=debug msg="using systemd mode: false" time="2024-12-21T11:32:09-05:00" level=debug msg="adding container to pod httpd2" time="2024-12-21T11:32:09-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2024-12-21T11:32:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-21T11:32:09-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /proc" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /dev" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /sys" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-21T11:32:09-05:00" level=debug msg="Allocated lock 2 for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\" has work directory \"/var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\" has run directory \"/run/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Strongconnecting node 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:09-05:00" level=debug msg="Pushed 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 onto stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Recursing to successor node fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="Strongconnecting node fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="Pushed fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 onto stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Finishing node fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1. Popped fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 off stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Finishing node 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70. Popped 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 off stack" time="2024-12-21T11:32:09-05:00" level=debug msg="Made network namespace at /run/netns/netns-5e59e54c-138b-0296-4440-fd9a61a17fce for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
time="2024-12-21T11:32:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/ZXMCTYPFUO6ETCCWBEVCYNUKHB,upperdir=/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/diff,workdir=/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c418,c730\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Mounted container \"fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1\" at \"/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/merged\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created root filesystem for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 at /var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/merged" [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_77beb6eb_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "16:d0:4d:6b:ea:f8", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2024-12-21T11:32:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:32:09-05:00" level=debug msg="Setting Cgroups for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 to machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice:libpod:fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:32:09-05:00" level=debug msg="Workdir \"/\" resolved to host path 
\"/var/lib/containers/storage/overlay/6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5/merged\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created OCI spec for container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 at /var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/config.json" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice for parent machine.slice and name libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:32:09-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 -u fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata -p /run/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/pidfile -n a47ffa0659b6-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1]" time="2024-12-21T11:32:09-05:00" level=info msg="Running conmon under slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice and unitName libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope" time="2024-12-21T11:32:09-05:00" 
level=debug msg="Received: 26428" time="2024-12-21T11:32:09-05:00" level=info msg="Got Conmon PID as 26426" time="2024-12-21T11:32:09-05:00" level=debug msg="Created container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 in OCI runtime" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-21T11:32:09-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-21T11:32:09-05:00" level=debug msg="Starting container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 with command [/catatonit -P]" time="2024-12-21T11:32:09-05:00" level=debug msg="Started container fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1" time="2024-12-21T11:32:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/VVWDSDX66M2DJOL5OSC7FCBPMU,upperdir=/var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/diff,workdir=/var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c418,c730\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Mounted container \"9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70\" at \"/var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/merged\"" time="2024-12-21T11:32:09-05:00" level=debug msg="Created root filesystem for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 at /var/lib/containers/storage/overlay/ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b/merged" time="2024-12-21T11:32:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-21T11:32:09-05:00" level=debug msg="Setting Cgroups for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 to machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice:libpod:9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:09-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-21T11:32:09-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-21T11:32:09-05:00" level=debug msg="Created OCI spec for container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 at /var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/config.json" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice for parent machine.slice and name libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688" time="2024-12-21T11:32:09-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice" time="2024-12-21T11:32:09-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-21T11:32:09-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 
9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 -u 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata -p /run/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70]" time="2024-12-21T11:32:09-05:00" level=info msg="Running conmon under slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice and unitName libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope" time="2024-12-21T11:32:10-05:00" level=debug msg="Received: 26433" time="2024-12-21T11:32:10-05:00" level=info msg="Got Conmon PID as 26431" time="2024-12-21T11:32:10-05:00" level=debug msg="Created container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 in OCI runtime" time="2024-12-21T11:32:10-05:00" level=debug msg="Starting container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-21T11:32:10-05:00" level=debug msg="Started container 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70" time="2024-12-21T11:32:10-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-21T11:32:10-05:00" level=debug msg="Shutting down engines" time="2024-12-21T11:32:10-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=26369 Dec 21 11:32:10 managed-node2 python3.12[26362]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 21 11:32:10 managed-node2 python3.12[26565]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:32:10 managed-node2 systemd[1]: Reload requested from client PID 26566 ('systemctl') (unit session-5.scope)... Dec 21 11:32:10 managed-node2 systemd[1]: Reloading... Dec 21 11:32:10 managed-node2 systemd[1]: Reloading finished in 212 ms. Dec 21 11:32:11 managed-node2 python3.12[26752]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 21 11:32:11 managed-node2 systemd[1]: Reload requested from client PID 26755 ('systemctl') (unit session-5.scope)... Dec 21 11:32:11 managed-node2 systemd[1]: Reloading... Dec 21 11:32:11 managed-node2 systemd[1]: Reloading finished in 211 ms. Dec 21 11:32:12 managed-node2 python3.12[26941]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:32:12 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2188. Dec 21 11:32:12 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2110. Dec 21 11:32:12 managed-node2 podman[26945]: 2024-12-21 11:32:12.499598585 -0500 EST m=+0.026259019 pod stop a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:12 managed-node2 systemd[1]: libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has successfully entered the 'dead' state. 
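The three ansible-systemd invocations recorded above are a daemon-reload, an enable, and a start of the podman-kube@ template instance whose name is the systemd-escape'd form of /etc/containers/ansible-kubernetes.d/httpd2.yml. Written out as playbook tasks (a sketch of what those module calls amount to; the role's real task files are not part of this excerpt), that is roughly:

- name: Reload systemd so the new podman-kube@ instance is picked up
  ansible.builtin.systemd:
    daemon_reload: true
    scope: system

- name: Enable the kube play unit for httpd2
  ansible.builtin.systemd:
    name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    scope: system
    enabled: true

- name: Start the kube play unit for httpd2
  ansible.builtin.systemd:
    name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    scope: system
    state: started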
Dec 21 11:32:12 managed-node2 podman[26945]: 2024-12-21 11:32:12.523956412 -0500 EST m=+0.050617110 container died fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, io.buildah.version=1.38.0) Dec 21 11:32:12 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Received SIGHUP Dec 21 11:32:12 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Dec 21 11:32:12 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Dec 21 11:32:12 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Successfully parsed config Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Listen v4 ip {} Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: Listen v6 ip {} Dec 21 11:32:12 managed-node2 aardvark-dns[26421]: No configuration found stopping the sever Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1)" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Using sqlite as database backend" Dec 21 11:32:12 managed-node2 systemd[1]: run-r735556f5d4974f57a4b69c88f3c80d45.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r735556f5d4974f57a4b69c88f3c80d45.scope has successfully entered the 'dead' state. 
Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using run root /run/containers/storage" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using tmp dir /run/libpod" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using transient store: false" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Initializing event backend journald" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: 
time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Setting parallel job count to 7" Dec 21 11:32:12 managed-node2 NetworkManager[775]: [1734798732.5706] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Dec 21 11:32:12 managed-node2 systemd[1]: run-netns-netns\x2d5e59e54c\x2d138b\x2d0296\x2d4440\x2dfd9a61a17fce.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d5e59e54c\x2d138b\x2d0296\x2d4440\x2dfd9a61a17fce.mount has successfully entered the 'dead' state. Dec 21 11:32:12 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1-userdata-shm.mount has successfully entered the 'dead' state. Dec 21 11:32:12 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-6962fe750f8622474540467bf968da919219616184ee268561b3414ce705e7d5-merged.mount has successfully entered the 'dead' state. 
Dec 21 11:32:12 managed-node2 podman[26945]: 2024-12-21 11:32:12.647414868 -0500 EST m=+0.174075678 container cleanup fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1)" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=debug msg="Shutting down engines" Dec 21 11:32:12 managed-node2 /usr/bin/podman[26956]: time="2024-12-21T11:32:12-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26956 Dec 21 11:32:12 managed-node2 systemd[1]: libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1.scope has successfully entered the 'dead' state. Dec 21 11:32:22 managed-node2 podman[26945]: time="2024-12-21T11:32:22-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Dec 21 11:32:22 managed-node2 systemd[1]: libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has successfully entered the 'dead' state. 
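Starting the unit first tears down the pod that the earlier ad-hoc podman play kube run created. The infra container exits immediately, but httpd2-httpd2 (busybox httpd running as PID 1) apparently does not react to SIGTERM, so podman waits out the default 10-second stop timeout and then sends SIGKILL, which is exactly what the warning above records. If a shorter grace period were wanted, the usual knob in kube play manifests is terminationGracePeriodSeconds, which recent podman releases map to the container stop timeout; a hypothetical variant of the manifest (not what this test uses) would be:

# Hypothetical tweak, not part of this test: cut the stop grace period to 2s.
apiVersion: v1
kind: Pod
metadata:
  name: httpd2
spec:
  terminationGracePeriodSeconds: 2
  containers:
    - name: httpd2
      image: quay.io/libpod/testimage:20210610
      command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]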
Dec 21 11:32:22 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : container 26433 exited with status 137 Dec 21 11:32:22 managed-node2 conmon[26431]: conmon 9cb727865dc30cdb1991 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice/libpod-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope/container/memory.events Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.546530099 -0500 EST m=+10.073190512 container died 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70)" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Using sqlite as database backend" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using graph driver overlay" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using run root /run/containers/storage" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using tmp dir /run/libpod" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using transient store: false" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 21 11:32:22 
managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Initializing event backend journald" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Setting parallel job count to 7" Dec 21 11:32:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-ed541d34d3eb7c9ee5c2086f113fed4c1dfed1009c0fdfa277d87228f9fd076b-merged.mount has successfully entered the 'dead' state. 
Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.585752966 -0500 EST m=+10.112413363 container cleanup 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70)" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=debug msg="Shutting down engines" Dec 21 11:32:22 managed-node2 /usr/bin/podman[26979]: time="2024-12-21T11:32:22-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26979 Dec 21 11:32:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 21 11:32:22 managed-node2 systemd[1]: Stopping libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope... ░░ Subject: A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has begun execution. ░░ ░░ The job identifier is 2196. Dec 21 11:32:22 managed-node2 systemd[1]: libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has successfully entered the 'dead' state. Dec 21 11:32:22 managed-node2 systemd[1]: Stopped libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope. ░░ Subject: A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70.scope has finished. ░░ ░░ The job identifier is 2196 and the job result is done. Dec 21 11:32:22 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Dec 21 11:32:22 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice - cgroup machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice. ░░ Subject: A stop job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice has finished. ░░ ░░ The job identifier is 2195 and the job result is done. Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.650966739 -0500 EST m=+10.177627079 container remove 9cb727865dc30cdb1991055e20529e551a419199224fe45abd3bacb53817cb70 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.679718399 -0500 EST m=+10.206378740 container remove fd6f390a6db243ce508b523e81f09f5660199f19493a67bc670d50a1c44a4bb1 (image=localhost/podman-pause:5.3.1-1733097600, name=a47ffa0659b6-infra, pod_id=a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688, io.buildah.version=1.38.0) Dec 21 11:32:22 managed-node2 systemd[1]: machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice: Failed to open /run/systemd/transient/machine-libpod_pod_a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688.slice: No such file or directory Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.689270657 -0500 EST m=+10.215930985 pod remove a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 (image=, name=httpd2) Dec 21 11:32:22 managed-node2 podman[26945]: Pods stopped: Dec 21 11:32:22 managed-node2 podman[26945]: a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 Dec 21 11:32:22 managed-node2 podman[26945]: Pods removed: Dec 21 11:32:22 managed-node2 podman[26945]: a47ffa0659b63f9021ce7a7568165f582d0adaf75390f56547bfc5ffb8906688 Dec 21 11:32:22 managed-node2 podman[26945]: Secrets removed: Dec 21 11:32:22 managed-node2 podman[26945]: Volumes removed: Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.713250966 -0500 EST m=+10.239911307 container create 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 systemd[1]: Created slice machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice - cgroup machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice. ░░ Subject: A start job for unit machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9.slice has finished successfully. ░░ ░░ The job identifier is 2197. 
Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.760070087 -0500 EST m=+10.286730418 container create 6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b (image=localhost/podman-pause:5.3.1-1733097600, name=d2f6641bb2ef-infra, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.768620734 -0500 EST m=+10.295281064 pod create d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9 (image=, name=httpd2) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.795590792 -0500 EST m=+10.322251120 container create fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.796419192 -0500 EST m=+10.323079662 container restart 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 systemd[1]: Started libpod-1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774.scope - libcrun container. ░░ Subject: A start job for unit libpod-1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774.scope has finished successfully. ░░ ░░ The job identifier is 2203. 
Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.772763403 -0500 EST m=+10.299423841 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.864475426 -0500 EST m=+10.391135881 container init 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 podman[26945]: 2024-12-21 11:32:22.866845559 -0500 EST m=+10.393505965 container start 1e5d0ec512aa99c96c2d4f268a0e3adddf1d038167f17193e41b2f417e3d0774 (image=localhost/podman-pause:5.3.1-1733097600, name=ce41ff3061ea-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8809] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 21 11:32:22 managed-node2 kernel: veth0: entered allmulticast mode Dec 21 11:32:22 managed-node2 kernel: veth0: entered promiscuous mode Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 21 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8926] device (podman1): carrier: link connected Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8943] device (veth0): carrier: link connected Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.8948] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Dec 21 11:32:22 managed-node2 (udev-worker)[27001]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:22 managed-node2 (udev-worker)[27000]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9312] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9317] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9324] device (podman1): Activation: starting connection 'podman1' (29f3f6df-595a-4aab-a041-3e41dcfec371) Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9326] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9329] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9331] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9333] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2210. Dec 21 11:32:22 managed-node2 systemd[1]: Started run-r88fc7918e0e342d1b6a0c330a8db1509.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r88fc7918e0e342d1b6a0c330a8db1509.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r88fc7918e0e342d1b6a0c330a8db1509.scope has finished successfully. ░░ ░░ The job identifier is 2289. Dec 21 11:32:22 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2210. Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9895] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9898] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 21 11:32:22 managed-node2 NetworkManager[775]: [1734798742.9903] device (podman1): Activation: successful, device activated. Dec 21 11:32:23 managed-node2 systemd[1]: Started libpod-6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b.scope - libcrun container. ░░ Subject: A start job for unit libpod-6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b.scope has finished successfully. ░░ ░░ The job identifier is 2295. Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.032325647 -0500 EST m=+10.558986112 container init 6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b (image=localhost/podman-pause:5.3.1-1733097600, name=d2f6641bb2ef-infra, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.035439049 -0500 EST m=+10.562099435 container start 6232f80e7e24d137282008f1a8c56f86974993cc3abcee33bf4f2bb238b7f54b (image=localhost/podman-pause:5.3.1-1733097600, name=d2f6641bb2ef-infra, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:23 managed-node2 systemd[1]: Started libpod-fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e.scope - libcrun container. ░░ Subject: A start job for unit libpod-fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e.scope has finished successfully. 
░░ ░░ The job identifier is 2302. Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.084962358 -0500 EST m=+10.611622739 container init fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.087780848 -0500 EST m=+10.614441278 container start fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:23 managed-node2 podman[26945]: 2024-12-21 11:32:23.094305323 -0500 EST m=+10.620965733 pod start d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9 (image=, name=httpd2) Dec 21 11:32:23 managed-node2 podman[26945]: Pod: Dec 21 11:32:23 managed-node2 podman[26945]: d2f6641bb2ef15c9a4fc00c612016d8a1e5a2401e74b69df76528f5dea2e09a9 Dec 21 11:32:23 managed-node2 podman[26945]: Container: Dec 21 11:32:23 managed-node2 podman[26945]: fb676e5504a3fceb1eaa23b3ee26e5a23ef2d12ed9e49ea918a4b7043c59a85e Dec 21 11:32:23 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2110. 
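At this point the ad-hoc pod has been fully replaced: the podman-kube@...httpd2.yml.service instance is active and owns the new pod d2f6641bb2ef together with the ce41ff3061ea-service container that ties the pod's lifetime to the unit. A quick way to confirm that state (illustrative check tasks, not part of the test playbook) would be:

- name: Confirm the kube play unit for httpd2 is active (illustrative)
  ansible.builtin.command:
    argv:
      - systemctl
      - is-active
      - podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service
  changed_when: false

- name: List the pod the unit owns (illustrative)
  ansible.builtin.command:
    argv:
      - podman
      - pod
      - ps
      - --filter
      - name=httpd2
  changed_when: false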
Dec 21 11:32:24 managed-node2 python3.12[27181]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:25 managed-node2 python3.12[27314]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:26 managed-node2 python3.12[27446]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:26 managed-node2 python3.12[27577]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:28 managed-node2 podman[27739]: 2024-12-21 11:32:28.116444429 -0500 EST m=+0.890032782 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:28 managed-node2 python3.12[27885]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:28 managed-node2 python3.12[28016]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:29 managed-node2 python3.12[28147]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:32:29 managed-node2 python3.12[28252]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798749.203544-16314-18816282897815/.source.yml _original_basename=._3jkfmmm follow=False checksum=356b616d8bef8b884d4d415b95d12e39be295f85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:30 managed-node2 python3.12[28383]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 21 11:32:30 managed-node2 systemd[1]: Created slice machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice - cgroup machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice. ░░ Subject: A start job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished successfully. ░░ ░░ The job identifier is 2309. Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.535472432 -0500 EST m=+0.063890926 container create a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.542014213 -0500 EST m=+0.070432641 pod create 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.570810705 -0500 EST m=+0.099229197 container create 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:30 managed-node2 kernel: veth1: entered allmulticast mode Dec 21 11:32:30 managed-node2 kernel: veth1: entered promiscuous mode Dec 21 11:32:30 managed-node2 NetworkManager[775]: [1734798750.5874] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.544034506 -0500 EST m=+0.072453111 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:30 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 21 11:32:30 managed-node2 NetworkManager[775]: [1734798750.5913] device (veth1): carrier: link connected Dec 21 11:32:30 managed-node2 (udev-worker)[28410]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope. ░░ Subject: A start job for unit libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully. ░░ ░░ The job identifier is 2316. Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope - libcrun container. 
░░ Subject: A start job for unit libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has finished successfully. ░░ ░░ The job identifier is 2323. Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.719560842 -0500 EST m=+0.247979403 container init a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.723001472 -0500 EST m=+0.251419992 container start a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope. ░░ Subject: A start job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully. ░░ ░░ The job identifier is 2330. Dec 21 11:32:30 managed-node2 systemd[1]: Started libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope - libcrun container. ░░ Subject: A start job for unit libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished successfully. ░░ ░░ The job identifier is 2337. 
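At this point the podman_play task has created pod httpd3 directly (outside of any systemd unit): an infra container from localhost/podman-pause plus the httpd3-httpd3 workload container. As a quick sketch, assuming root on the managed node, the resulting layout can be checked with:

# Sketch: list pods together with their member containers and states
podman pod ps --ctr-names --ctr-status
# The test itself later inspects the pod state the same way:
podman pod inspect httpd3 --format '{{.State}}'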
Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.792451436 -0500 EST m=+0.320869970 container init 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.795390308 -0500 EST m=+0.323808836 container start 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:30 managed-node2 podman[28391]: 2024-12-21 11:32:30.801447057 -0500 EST m=+0.329865572 pod start 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:31 managed-node2 python3.12[28571]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:32:31 managed-node2 systemd[1]: Reload requested from client PID 28572 ('systemctl') (unit session-5.scope)... Dec 21 11:32:31 managed-node2 systemd[1]: Reloading... Dec 21 11:32:31 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 21 11:32:31 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2344. Dec 21 11:32:31 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Dec 21 11:32:31 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2344. Dec 21 11:32:32 managed-node2 python3.12[28762]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 21 11:32:32 managed-node2 systemd[1]: Reload requested from client PID 28765 ('systemctl') (unit session-5.scope)... Dec 21 11:32:32 managed-node2 systemd[1]: Reloading... Dec 21 11:32:32 managed-node2 systemd[1]: Reloading finished in 213 ms. Dec 21 11:32:33 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
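The unit name enabled here is the output of the systemd-escape call logged at 11:32:25 above. A minimal sketch of reproducing that escaping and enabling the instance by hand (assuming root on the managed node); dumping the template also shows why starting it replaces the pod that podman_play just created:

# Reproduce the escaped instance name (matches the unit used in the tasks above)
systemd-escape --template podman-kube@.service \
    /etc/containers/ansible-kubernetes.d/httpd3.yml
# -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service

# Enable it, as the ansible-systemd task above does
systemctl enable 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'

# Show what the template actually executes when started
systemctl cat podman-kube@.service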
Dec 21 11:32:33 managed-node2 python3.12[28951]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:32:33 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2422. Dec 21 11:32:33 managed-node2 podman[28956]: 2024-12-21 11:32:33.242876627 -0500 EST m=+0.025638890 pod stop 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:33 managed-node2 systemd[1]: libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has successfully entered the 'dead' state. Dec 21 11:32:33 managed-node2 podman[28956]: 2024-12-21 11:32:33.263225055 -0500 EST m=+0.045987552 container died a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, io.buildah.version=1.38.0) Dec 21 11:32:33 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:33 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Dec 21 11:32:33 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Dec 21 11:32:33 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:33 managed-node2 systemd[1]: run-netns-netns\x2d4e373f23\x2d58bf\x2df7fc\x2da36b\x2dd13613a2791c.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d4e373f23\x2d58bf\x2df7fc\x2da36b\x2dd13613a2791c.mount has successfully entered the 'dead' state. Dec 21 11:32:33 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875-userdata-shm.mount has successfully entered the 'dead' state. Dec 21 11:32:33 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0a2ca7ba44d782d944ef1485a30faf82fbf1d196b6a7a05d8f71aa13fa0d892f-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-0a2ca7ba44d782d944ef1485a30faf82fbf1d196b6a7a05d8f71aa13fa0d892f-merged.mount has successfully entered the 'dead' state. 
Dec 21 11:32:33 managed-node2 podman[28956]: 2024-12-21 11:32:33.34672823 -0500 EST m=+0.129490409 container cleanup a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:33 managed-node2 systemd[1]: libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875.scope has successfully entered the 'dead' state. Dec 21 11:32:43 managed-node2 podman[28956]: time="2024-12-21T11:32:43-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Dec 21 11:32:43 managed-node2 systemd[1]: libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has successfully entered the 'dead' state. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.289656672 -0500 EST m=+10.072419071 container died 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 21 11:32:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay-90f1665c6d14610807025945bb1c5bf0a44b71c87ac6b1a95b1dcb8c357cde09-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-90f1665c6d14610807025945bb1c5bf0a44b71c87ac6b1a95b1dcb8c357cde09-merged.mount has successfully entered the 'dead' state. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.32683333 -0500 EST m=+10.109595630 container cleanup 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 21 11:32:43 managed-node2 systemd[1]: Stopping libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope... ░░ Subject: A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has begun execution. ░░ ░░ The job identifier is 2508. Dec 21 11:32:43 managed-node2 systemd[1]: libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has successfully entered the 'dead' state. 
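The StopSignal warning above reflects podman's default 10-second stop timeout: the httpd3-httpd3 container did not exit on SIGTERM within that window, so podman escalated to SIGKILL. If a longer grace period were wanted, the timeout can be raised when stopping the pod; a sketch, with 30 as an arbitrary example value:

# Sketch: stop the pod with a longer grace period before SIGKILL
podman pod stop --time 30 httpd3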
Dec 21 11:32:43 managed-node2 systemd[1]: Stopped libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope. ░░ Subject: A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c.scope has finished. ░░ ░░ The job identifier is 2508 and the job result is done. Dec 21 11:32:43 managed-node2 systemd[1]: Removed slice machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice - cgroup machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice. ░░ Subject: A stop job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice has finished. ░░ ░░ The job identifier is 2507 and the job result is done. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.33522454 -0500 EST m=+10.117986798 pod stop 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 systemd[1]: machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: Failed to open /run/systemd/transient/machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: No such file or directory Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.342340383 -0500 EST m=+10.125102713 pod stop 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 systemd[1]: machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: Failed to open /run/systemd/transient/machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: No such file or directory Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.369732065 -0500 EST m=+10.152494255 container remove 32030132d42aba1ff20103a4fb8b934b89392bc60d586d75ec6f20f8d3707c4c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.396911901 -0500 EST m=+10.179674112 container remove a6c52234a492d3974488b003466f9269dae3dbd90bd3bc4a842cb9ab66932875 (image=localhost/podman-pause:5.3.1-1733097600, name=0d2100f17275-infra, pod_id=0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 systemd[1]: machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: Failed to open /run/systemd/transient/machine-libpod_pod_0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702.slice: No such file or directory Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.405993365 -0500 EST m=+10.188755541 pod remove 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 podman[28956]: Pods stopped: Dec 21 11:32:43 managed-node2 podman[28956]: 
0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 Dec 21 11:32:43 managed-node2 podman[28956]: Pods removed: Dec 21 11:32:43 managed-node2 podman[28956]: 0d2100f17275a183cf6dbd0027242aab992f107db8baebd9fdea4c542c731702 Dec 21 11:32:43 managed-node2 podman[28956]: Secrets removed: Dec 21 11:32:43 managed-node2 podman[28956]: Volumes removed: Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.431075178 -0500 EST m=+10.213837357 container create 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 systemd[1]: Created slice machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice - cgroup machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice. ░░ Subject: A start job for unit machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199.slice has finished successfully. ░░ ░░ The job identifier is 2509. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.470053048 -0500 EST m=+10.252815223 container create 2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009 (image=localhost/podman-pause:5.3.1-1733097600, name=26cc0fa7c809-infra, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.476236336 -0500 EST m=+10.258998514 pod create 26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.478414019 -0500 EST m=+10.261176309 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.504509835 -0500 EST m=+10.287272011 container create a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.50489022 -0500 EST m=+10.287652408 container restart 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 systemd[1]: Started libpod-61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e.scope - libcrun container. 
░░ Subject: A start job for unit libpod-61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e.scope has finished successfully. ░░ ░░ The job identifier is 2515. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.548961324 -0500 EST m=+10.331723546 container init 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.551487902 -0500 EST m=+10.334250212 container start 61716cd302892e2252e6e13ed698e8739b1a809d43e4305353cc34e906dd1a9e (image=localhost/podman-pause:5.3.1-1733097600, name=a8db008d7cc8-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:43 managed-node2 kernel: veth1: entered allmulticast mode Dec 21 11:32:43 managed-node2 kernel: veth1: entered promiscuous mode Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 21 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 21 11:32:43 managed-node2 NetworkManager[775]: [1734798763.5765] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Dec 21 11:32:43 managed-node2 NetworkManager[775]: [1734798763.5809] device (veth1): carrier: link connected Dec 21 11:32:43 managed-node2 (udev-worker)[29003]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:32:43 managed-node2 systemd[1]: Started libpod-2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009.scope - libcrun container. ░░ Subject: A start job for unit libpod-2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009.scope has finished successfully. ░░ ░░ The job identifier is 2522. 
Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.674483419 -0500 EST m=+10.457245731 container init 2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009 (image=localhost/podman-pause:5.3.1-1733097600, name=26cc0fa7c809-infra, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.677263682 -0500 EST m=+10.460025936 container start 2907e4388cf2d2224f218ff9618b15f878c5d48a6a1130eb9fba43e884f5b009 (image=localhost/podman-pause:5.3.1-1733097600, name=26cc0fa7c809-infra, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 21 11:32:43 managed-node2 systemd[1]: Started libpod-a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c.scope - libcrun container. ░░ Subject: A start job for unit libpod-a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c.scope has finished successfully. ░░ ░░ The job identifier is 2529. Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.726895569 -0500 EST m=+10.509657828 container init a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.729387453 -0500 EST m=+10.512149712 container start a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 21 11:32:43 managed-node2 podman[28956]: 2024-12-21 11:32:43.735502247 -0500 EST m=+10.518264438 pod start 26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199 (image=, name=httpd3) Dec 21 11:32:43 managed-node2 podman[28956]: Pod: Dec 21 11:32:43 managed-node2 podman[28956]: 26cc0fa7c8097dd3061079c8f0d79db2a774331739fc0212c12243e67c727199 Dec 21 11:32:43 managed-node2 podman[28956]: Container: Dec 21 11:32:43 managed-node2 podman[28956]: a6061cf8dd2a28a1b959fccc574d61dff99ebd633d6e5c46e2b9ba7b3c55018c Dec 21 11:32:43 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. 
░░ ░░ The job identifier is 2422. Dec 21 11:32:44 managed-node2 sudo[29207]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fvyxrsktvufjdvxghfwczcfatmaofusl ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798764.2274415-16922-188999455305959/AnsiballZ_command.py' Dec 21 11:32:44 managed-node2 sudo[29207]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29207) opened. Dec 21 11:32:44 managed-node2 sudo[29207]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:32:44 managed-node2 python3.12[29210]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:44 managed-node2 systemd[22955]: Started podman-29218.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. Dec 21 11:32:44 managed-node2 sudo[29207]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:32:45 managed-node2 python3.12[29358]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:45 managed-node2 python3.12[29498]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:45 managed-node2 sudo[29679]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qgioeadtlhugbehxziajjnovvcezkswg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1734798765.7554495-16999-94045399399875/AnsiballZ_command.py' Dec 21 11:32:45 managed-node2 sudo[29679]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29679) opened. 
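With the podman-kube unit now active, the surrounding tasks (11:32:44-48) verify each deployment three ways: podman pod inspect for the pod state, systemctl list-units for the podman-kube@ instance, and an HTTP fetch of index.txt. A sketch of equivalent manual checks for the root-owned httpd3 instance, using the same URLs as the uri tasks below:

# Sketch: the kinds of checks the verification tasks perform, done by hand
systemctl status 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'
podman ps --filter label=PODMAN_SYSTEMD_UNIT --format '{{.Names}} {{.Status}}'
curl -sSf http://localhost:15001/index.txt
curl -sSf http://localhost:15002/index.txt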
Dec 21 11:32:45 managed-node2 sudo[29679]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 21 11:32:46 managed-node2 python3.12[29682]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:46 managed-node2 sudo[29679]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 21 11:32:46 managed-node2 python3.12[29816]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:46 managed-node2 python3.12[29950]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:47 managed-node2 python3.12[30084]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:48 managed-node2 python3.12[30216]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:48 managed-node2 python3.12[30347]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:32:49 managed-node2 python3.12[30479]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None 
group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:49 managed-node2 python3.12[30610]: ansible-file Invoked with path=/tmp/lsr_9pquyim__podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:51 managed-node2 python3.12[30784]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Dec 21 11:32:53 managed-node2 python3.12[30957]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:32:53 managed-node2 python3.12[31088]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:32:56 managed-node2 python3.12[31224]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:32:57 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 dbus-broker-launch[650]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. 
However, a future notification will then cause the configuration to be ░░ reladed again. Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 dbus-broker-launch[23428]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 21 11:32:57 managed-node2 systemd[1]: Reload requested from client PID 31232 ('systemctl') (unit session-5.scope)... Dec 21 11:32:57 managed-node2 systemd[1]: Reloading... Dec 21 11:32:57 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 21 11:32:57 managed-node2 systemd[1]: Started run-r6ee5ccc8ad3942f095fb125ebd648961.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r6ee5ccc8ad3942f095fb125ebd648961.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r6ee5ccc8ad3942f095fb125ebd648961.service has finished successfully. ░░ ░░ The job identifier is 2540. Dec 21 11:32:58 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2618. Dec 21 11:32:58 managed-node2 systemd[1]: Reload requested from client PID 31296 ('systemctl') (unit session-5.scope)... Dec 21 11:32:58 managed-node2 systemd[1]: Reloading... Dec 21 11:32:58 managed-node2 systemd[1]: Reloading finished in 363 ms. Dec 21 11:32:58 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Dec 21 11:32:58 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Dec 21 11:32:58 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2618. Dec 21 11:32:58 managed-node2 systemd[1]: run-r6ee5ccc8ad3942f095fb125ebd648961.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r6ee5ccc8ad3942f095fb125ebd648961.service has successfully entered the 'dead' state. 
Dec 21 11:32:59 managed-node2 python3.12[31489]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:32:59 managed-node2 python3.12[31620]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:00 managed-node2 python3.12[31751]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:33:00 managed-node2 systemd[1]: Reload requested from client PID 31754 ('systemctl') (unit session-5.scope)... Dec 21 11:33:00 managed-node2 systemd[1]: Reloading... Dec 21 11:33:00 managed-node2 systemd[1]: Reloading finished in 209 ms. Dec 21 11:33:00 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2696. Dec 21 11:33:00 managed-node2 (rtmonger)[31811]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Dec 21 11:33:00 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2696. 
Dec 21 11:33:01 managed-node2 python3.12[31969]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 rsyslogd[658]: imjournal: journal files changed, reloading... 
[v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:01 managed-node2 certmonger[31985]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
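The certificate_request call at 11:33:01 drives certmonger to issue and track the self-signed certificate written above. A rough CLI sketch of a comparable tracking request follows; the CA nickname used here is an assumption that mirrors the role's ca=self-sign setting, and the real nickname should be taken from getcert list-cas:

# Sketch: request and track a self-signed server cert, roughly as the role does
getcert list-cas                      # find the locally available CA nicknames
getcert request -w \
    -f /etc/pki/tls/certs/quadlet_demo.crt \
    -k /etc/pki/tls/private/quadlet_demo.key \
    -D localhost \
    -c self-sign                      # assumed nickname; adjust to match list-cas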
Dec 21 11:33:01 managed-node2 certmonger[31811]: 2024-12-21 11:33:01 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:02 managed-node2 python3.12[32116]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 21 11:33:02 managed-node2 python3.12[32247]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Dec 21 11:33:03 managed-node2 python3.12[32378]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 21 11:33:03 managed-node2 python3.12[32509]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:33:03 managed-node2 certmonger[31811]: 2024-12-21 11:33:03 [31811] Wrote to /var/lib/certmonger/requests/20241221163301 Dec 21 11:33:04 managed-node2 python3.12[32641]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:04 managed-node2 python3.12[32772]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:04 managed-node2 python3.12[32903]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:05 managed-node2 python3.12[33034]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:05 managed-node2 python3.12[33165]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:07 managed-node2 python3.12[33427]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:33:08 managed-node2 python3.12[33564]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 21 11:33:09 managed-node2 python3.12[33696]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:11 managed-node2 python3.12[33829]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:11 managed-node2 python3.12[33960]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:12 managed-node2 python3.12[34091]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:33:13 managed-node2 python3.12[34223]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:33:14 managed-node2 python3.12[34356]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:33:15 managed-node2 python3.12[34489]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:33:15 managed-node2 python3.12[34620]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:33:20 managed-node2 python3.12[35227]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:22 managed-node2 python3.12[35360]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:22 managed-node2 python3.12[35491]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:23 managed-node2 python3.12[35596]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798802.6458833-18916-273275758786371/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False 
checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:24 managed-node2 python3.12[35727]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:24 managed-node2 systemd[1]: Reload requested from client PID 35728 ('systemctl') (unit session-5.scope)... Dec 21 11:33:24 managed-node2 systemd[1]: Reloading... Dec 21 11:33:24 managed-node2 systemd[1]: Reloading finished in 219 ms. Dec 21 11:33:24 managed-node2 python3.12[35915]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:33:24 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2775. Dec 21 11:33:24 managed-node2 quadlet-demo-network[35919]: systemd-quadlet-demo Dec 21 11:33:24 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2775. Dec 21 11:33:25 managed-node2 python3.12[36057]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:27 managed-node2 python3.12[36190]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:28 managed-node2 python3.12[36321]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:28 managed-node2 python3.12[36426]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798807.9232495-19193-85517954084435/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:29 managed-node2 python3.12[36557]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:29 managed-node2 systemd[1]: Reload requested from client PID 36558 ('systemctl') (unit session-5.scope)... Dec 21 11:33:29 managed-node2 systemd[1]: Reloading... 
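Two pieces of host setup above feed the quadlet deployment: the firewall role opened 8000/tcp and 9000/tcp (11:33:15), and quadlet-demo-network.service created the systemd-quadlet-demo network from the .network unit just installed. A sketch of manual equivalents and checks, assuming root on the managed node:

# firewall-cmd equivalent of the firewall_lib calls (permanent rules, then runtime)
firewall-cmd --permanent --add-port=8000/tcp --add-port=9000/tcp
firewall-cmd --reload

# Confirm the network created by the quadlet .network unit
podman network inspect systemd-quadlet-demo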
Dec 21 11:33:29 managed-node2 systemd[1]: Reloading finished in 208 ms. Dec 21 11:33:30 managed-node2 python3.12[36744]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:33:30 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2859. Dec 21 11:33:30 managed-node2 podman[36748]: 2024-12-21 11:33:30.173025703 -0500 EST m=+0.026031217 volume create systemd-quadlet-demo-mysql Dec 21 11:33:30 managed-node2 quadlet-demo-mysql-volume[36748]: systemd-quadlet-demo-mysql Dec 21 11:33:30 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2859. Dec 21 11:33:31 managed-node2 python3.12[36887]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:32 managed-node2 python3.12[37020]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:39 managed-node2 podman[37160]: 2024-12-21 11:33:39.053601702 -0500 EST m=+5.698706357 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Dec 21 11:33:39 managed-node2 python3.12[37474]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:39 managed-node2 python3.12[37605]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:40 managed-node2 python3.12[37710]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798819.634735-19706-34961030233952/.source.container _original_basename=.ahp1bvs1 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:40 managed-node2 python3.12[37841]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None 
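quadlet-demo-mysql.volume goes through the same copy / daemon-reload / start cycle earlier in this block, and starting quadlet-demo-mysql-volume.service makes podman log "volume create systemd-quadlet-demo-mysql". Again the file content is not logged; a bare [Volume] section is enough to yield that volume name. A single-task sketch in the same style as the play above:

# Assumed content; slots into the tasks list of the sketch above.
- name: Install the volume quadlet
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo-mysql.volume
    owner: root
    group: root
    mode: "0644"
    content: |
      [Volume]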
Dec 21 11:33:40 managed-node2 systemd[1]: Reload requested from client PID 37842 ('systemctl') (unit session-5.scope)... Dec 21 11:33:40 managed-node2 systemd[1]: Reloading... Dec 21 11:33:41 managed-node2 systemd[1]: Reloading finished in 223 ms. Dec 21 11:33:41 managed-node2 python3.12[38028]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:33:41 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2943. Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.671006526 -0500 EST m=+0.041799247 container create 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7008] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Dec 21 11:33:41 managed-node2 kernel: veth2: entered allmulticast mode Dec 21 11:33:41 managed-node2 kernel: veth2: entered promiscuous mode Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Dec 21 11:33:41 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7164] device (veth2): carrier: link connected Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7167] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7177] device (podman2): carrier: link connected Dec 21 11:33:41 managed-node2 (udev-worker)[38047]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:33:41 managed-node2 (udev-worker)[38046]: Network interface NamePolicy= disabled on kernel command line. 
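quadlet-demo-mysql.service, whose startup the entries above record, is generated from the quadlet-demo-mysql.container file copied a moment earlier. That file's content is also absent from the journal; the sketch below is reconstructed from what the surrounding entries do show (the image quay.io/linux-system-roles/mysql:5.6, the container name quadlet-demo-mysql, the systemd-quadlet-demo-mysql volume, attachment to the systemd-quadlet-demo network via the podman2 bridge, and a configured health check). The mount path and health command are assumptions.

# Reconstructed sketch of the container quadlet; marked values are assumptions.
- name: Install the MySQL container quadlet
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo-mysql.container
    owner: root
    group: root
    mode: "0644"
    content: |
      [Container]
      Image=quay.io/linux-system-roles/mysql:5.6
      ContainerName=quadlet-demo-mysql
      # the journal shows only the volume name; the mount path is assumed
      Volume=quadlet-demo-mysql.volume:/var/lib/mysql
      Network=quadlet-demo.network
      # a health check is clearly configured (see the healthcheck timer and the
      # recurring health_status events); the actual command is not visible in
      # the journal, so a placeholder is used here
      HealthCmd=/bin/true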
Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.654759215 -0500 EST m=+0.025552067 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7687] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7707] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7718] device (podman2): Activation: starting connection 'podman2' (f08971b4-089a-4771-92e2-3a619f890dc5) Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7719] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7725] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7729] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.7732] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3030. Dec 21 11:33:41 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3030. Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.8087] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.8089] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 21 11:33:41 managed-node2 NetworkManager[775]: [1734798821.8099] device (podman2): Activation: successful, device activated. Dec 21 11:33:41 managed-node2 systemd[1]: Started 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer - /usr/bin/podman healthcheck run 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474. ░░ Subject: A start job for unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer has finished successfully. ░░ ░░ The job identifier is 3109. 
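The transient timer started above (the unit ending in -503f4df31c37e8b6.timer) is how quadlet/podman drives the container's periodic health check: each tick runs /usr/bin/podman healthcheck run on the container ID, which produces the recurring health_status=healthy events for quadlet-demo-mysql seen through the rest of the log. The same check can be run on demand; a task sketch using the container name from the journal:

- name: Run the MySQL health check once (ad-hoc sketch)
  ansible.builtin.command: podman healthcheck run quadlet-demo-mysql
  register: mysql_health
  changed_when: false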
Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.870393132 -0500 EST m=+0.241186044 container init 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:41 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2943. Dec 21 11:33:41 managed-node2 podman[38032]: 2024-12-21 11:33:41.897984688 -0500 EST m=+0.268777553 container start 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:41 managed-node2 quadlet-demo-mysql[38032]: 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 Dec 21 11:33:42 managed-node2 podman[38090]: 2024-12-21 11:33:42.052326045 -0500 EST m=+0.141002444 container health_status 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:33:42 managed-node2 python3.12[38285]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:44 managed-node2 python3.12[38429]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:44 managed-node2 python3.12[38560]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:44 managed-node2 python3.12[38665]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798824.4087186-19912-106128108103025/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:45 managed-node2 python3.12[38820]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:45 managed-node2 systemd[1]: Reload requested from client PID 38821 ('systemctl') (unit session-5.scope)... Dec 21 11:33:45 managed-node2 systemd[1]: Reloading... Dec 21 11:33:45 managed-node2 systemd[1]: Reloading finished in 230 ms. 
Dec 21 11:33:46 managed-node2 python3.12[39008]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:48 managed-node2 python3.12[39178]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:48 managed-node2 python3.12[39309]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:33:49 managed-node2 python3.12[39414]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1734798828.4374363-20045-16704362976477/.source.yml _original_basename=.8i5lgawn follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:50 managed-node2 python3.12[39552]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:33:50 managed-node2 systemd[1]: Reload requested from client PID 39553 ('systemctl') (unit session-5.scope)... Dec 21 11:33:50 managed-node2 systemd[1]: Reloading... Dec 21 11:33:50 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 21 11:33:51 managed-node2 python3.12[39740]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:33:51 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
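quadlet-demo.yml, installed above, is the Kubernetes YAML manifest that the quadlet-demo.kube unit will later hand to podman kube play. Its content is not captured here, but the names podman logs when quadlet-demo.service starts at 11:34:13 below (pod quadlet-demo, containers quadlet-demo-wordpress and quadlet-demo-envoy, volume wp-pv-claim) constrain its shape. A skeleton consistent with those names, with ports, mounts, and environment omitted because they are not recoverable from this journal:

# Inferred skeleton only; not the manifest the test actually deploys.
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: wp-pv-claim
---
apiVersion: v1
kind: Pod
metadata:
  name: quadlet-demo
spec:
  containers:
    - name: wordpress
      image: quay.io/linux-system-roles/wordpress:4.8-apache
    - name: envoy
      image: quay.io/linux-system-roles/envoyproxy:v1.25.0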
Dec 21 11:33:51 managed-node2 python3.12[39875]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Dec 21 11:33:52 managed-node2 python3.12[40028]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:33:53 managed-node2 python3.12[40159]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:04 managed-node2 podman[40299]: 2024-12-21 11:34:04.905781344 -0500 EST m=+11.319455823 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Dec 21 11:34:09 managed-node2 systemd[22955]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 118. Dec 21 11:34:10 managed-node2 systemd[22955]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 118. 
Dec 21 11:34:10 managed-node2 podman[40718]: 2024-12-21 11:34:10.382566219 -0500 EST m=+5.015616934 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Dec 21 11:34:10 managed-node2 python3.12[40983]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:11 managed-node2 python3.12[41114]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 21 11:34:11 managed-node2 python3.12[41219]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1734798850.9322016-20524-56637568013428/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:12 managed-node2 python3.12[41350]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:34:12 managed-node2 systemd[1]: Reload requested from client PID 41351 ('systemctl') (unit session-5.scope)... Dec 21 11:34:12 managed-node2 systemd[1]: Reloading... Dec 21 11:34:12 managed-node2 podman[41352]: 2024-12-21 11:34:12.317135096 -0500 EST m=+0.156343617 container health_status 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:34:12 managed-node2 systemd[1]: Reloading finished in 271 ms. Dec 21 11:34:12 managed-node2 python3.12[41553]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 21 11:34:12 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3343. 
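quadlet-demo.kube is the last quadlet dropped in; the quadlet-demo.service it generates (whose start job begins above) wraps podman kube play around quadlet-demo.yml. Such .kube files are short. In the sketch below only Yaml= is certain from the file names in the journal; the network reference is inferred from the pod later joining the same podman2 bridge, and any PublishPort= lines are omitted because they cannot be read from this log:

# Sketch; Yaml= from the journal, the rest inferred or omitted.
- name: Install the kube quadlet
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo.kube
    owner: root
    group: root
    mode: "0644"
    content: |
      [Kube]
      Yaml=quadlet-demo.yml
      Network=quadlet-demo.network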
Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Pods stopped: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Pods removed: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Secrets removed: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Volumes removed: Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.046407256 -0500 EST m=+0.035120762 volume create wp-pv-claim Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.07359806 -0500 EST m=+0.062311564 container create 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.081892004 -0500 EST m=+0.070605523 volume create envoy-proxy-config Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.088568557 -0500 EST m=+0.077282063 volume create envoy-certificates Dec 21 11:34:13 managed-node2 systemd[1]: Created slice machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice - cgroup machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice. ░░ Subject: A start job for unit machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice has finished successfully. ░░ ░░ The job identifier is 3430. Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.136860015 -0500 EST m=+0.125573521 container create a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.142994513 -0500 EST m=+0.131708015 pod create a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 (image=, name=quadlet-demo) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.184907236 -0500 EST m=+0.173620880 container create ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.214657838 -0500 EST m=+0.203371344 container create d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.21502922 -0500 EST m=+0.203742738 container restart 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.152599938 -0500 EST m=+0.141313669 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.18931206 
-0500 EST m=+0.178025706 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope - libcrun container. ░░ Subject: A start job for unit libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope has finished successfully. ░░ ░░ The job identifier is 3436. Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.265681715 -0500 EST m=+0.254395360 container init 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.269159348 -0500 EST m=+0.257872902 container start 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Dec 21 11:34:13 managed-node2 kernel: veth3: entered allmulticast mode Dec 21 11:34:13 managed-node2 kernel: veth3: entered promiscuous mode Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Dec 21 11:34:13 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state Dec 21 11:34:13 managed-node2 NetworkManager[775]: [1734798853.3044] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11) Dec 21 11:34:13 managed-node2 NetworkManager[775]: [1734798853.3057] device (veth3): carrier: link connected Dec 21 11:34:13 managed-node2 (udev-worker)[41577]: Network interface NamePolicy= disabled on kernel command line. Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope - libcrun container. ░░ Subject: A start job for unit libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope has finished successfully. ░░ ░░ The job identifier is 3443. 
Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.392061159 -0500 EST m=+0.380774732 container init a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.395158122 -0500 EST m=+0.383871707 container start a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope - libcrun container. ░░ Subject: A start job for unit libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope has finished successfully. ░░ ░░ The job identifier is 3450. Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.45996148 -0500 EST m=+0.448675022 container init ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.462528439 -0500 EST m=+0.451242015 container start ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 systemd[1]: Started libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope - libcrun container. ░░ Subject: A start job for unit libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope has finished successfully. ░░ ░░ The job identifier is 3457. 
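With the pod coming up, the entries that follow show the test verifying the deployment: ls -alrtF /etc/containers/systemd, podman ps -a, podman volume ls, podman pod ps --ctr-ids --ctr-names --ctr-status, systemctl list-units | grep quadlet, and then repeated get_url fetches of https://localhost:8000 into /run/out roughly five seconds apart. The journal records each fetch as a separate module invocation; a retry-loop sketch of the same check (the get_url arguments are taken from the log, the retries and delay values are assumptions):

- name: Wait for the front end behind port 8000 to answer (sketch)
  ansible.builtin.get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false
  register: web_out
  retries: 6
  delay: 5
  until: web_out is succeeded
  # after the last attempt the test collects journalctl -ex, podman ps -a,
  # and the unit list, which is what the 11:34:49 entries below show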
Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.536515665 -0500 EST m=+0.525229213 container init d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.538973229 -0500 EST m=+0.527686824 container start d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:34:13 managed-node2 podman[41557]: 2024-12-21 11:34:13.546111037 -0500 EST m=+0.534824617 pod start a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 (image=, name=quadlet-demo) Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Volumes: Dec 21 11:34:13 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 3343. Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: wp-pv-claim Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Pod: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: Containers: Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 Dec 21 11:34:13 managed-node2 quadlet-demo[41557]: d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 Dec 21 11:34:14 managed-node2 python3.12[41820]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:14 managed-node2 python3.12[42027]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:15 managed-node2 python3.12[42217]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:15 managed-node2 python3.12[42356]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:16 managed-node2 python3.12[42495]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:16 managed-node2 python3.12[42629]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False 
http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:22 managed-node2 python3.12[42760]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:27 managed-node2 python3.12[42891]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:33 managed-node2 python3.12[43022]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:38 managed-node2 python3.12[43153]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:43 managed-node2 podman[43175]: 2024-12-21 11:34:43.112259851 -0500 EST m=+0.102896688 container health_status 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:34:44 managed-node2 python3.12[43300]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None 
serole=None selevel=None setype=None attributes=None Dec 21 11:34:49 managed-node2 python3.12[43431]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:34:49 managed-node2 python3.12[43562]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:50 managed-node2 python3.12[43694]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:50 managed-node2 python3.12[43832]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:51 managed-node2 python3.12[43972]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:51 managed-node2 python3.12[44106]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:53 managed-node2 python3.12[44369]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:34:54 managed-node2 python3.12[44506]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:34:56 managed-node2 python3.12[44639]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 21 11:34:57 managed-node2 python3.12[44771]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 21 11:34:57 managed-node2 python3.12[44904]: ansible-ansible.legacy.systemd Invoked with name=firewalld 
state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 21 11:34:58 managed-node2 python3.12[45037]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:34:58 managed-node2 python3.12[45168]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 21 11:35:03 managed-node2 python3.12[45716]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:05 managed-node2 python3.12[45849]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Dec 21 11:35:05 managed-node2 systemd[1]: Reload requested from client PID 45852 ('systemctl') (unit session-5.scope)... Dec 21 11:35:05 managed-node2 systemd[1]: Reloading... Dec 21 11:35:05 managed-node2 systemd[1]: Reloading finished in 234 ms. Dec 21 11:35:05 managed-node2 systemd[1]: Stopping quadlet-demo.service... ░░ Subject: A stop job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3542. Dec 21 11:35:05 managed-node2 systemd[1]: libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45.scope has successfully entered the 'dead' state. Dec 21 11:35:05 managed-node2 podman[45910]: 2024-12-21 11:35:05.744846325 -0500 EST m=+0.022923439 container died 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45-userdata-shm.mount has successfully entered the 'dead' state. Dec 21 11:35:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay-bdee1dc276b2c7beb520703923be1458706e0ffcef73b440186a445bf3df565f-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-bdee1dc276b2c7beb520703923be1458706e0ffcef73b440186a445bf3df565f-merged.mount has successfully entered the 'dead' state. Dec 21 11:35:05 managed-node2 podman[45910]: 2024-12-21 11:35:05.788318713 -0500 EST m=+0.066395894 container cleanup 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:05 managed-node2 podman[45918]: 2024-12-21 11:35:05.833907567 -0500 EST m=+0.025266689 pod stop a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 (image=, name=quadlet-demo) Dec 21 11:35:05 managed-node2 systemd[1]: libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba.scope has successfully entered the 'dead' state. Dec 21 11:35:05 managed-node2 podman[45918]: 2024-12-21 11:35:05.85867501 -0500 EST m=+0.050034354 container died a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:35:05 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Dec 21 11:35:05 managed-node2 kernel: veth3 (unregistering): left allmulticast mode Dec 21 11:35:05 managed-node2 kernel: veth3 (unregistering): left promiscuous mode Dec 21 11:35:05 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Dec 21 11:35:05 managed-node2 systemd[1]: libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope has successfully entered the 'dead' state. Dec 21 11:35:05 managed-node2 conmon[41615]: conmon d62af06fe80a7ce9e88e : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice/libpod-d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008.scope/container/memory.events Dec 21 11:35:05 managed-node2 podman[45918]: 2024-12-21 11:35:05.922304233 -0500 EST m=+0.113663510 container stop d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:05 managed-node2 systemd[1]: run-netns-netns\x2d400590e2\x2d4854\x2d6634\x2d8711\x2dec11b0010d66.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d400590e2\x2d4854\x2d6634\x2d8711\x2dec11b0010d66.mount has successfully entered the 'dead' state. 
Dec 21 11:35:05 managed-node2 podman[45918]: 2024-12-21 11:35:05.940228584 -0500 EST m=+0.131587934 container died d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:05 managed-node2 systemd[1]: libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8.scope has successfully entered the 'dead' state. Dec 21 11:35:05 managed-node2 podman[45918]: 2024-12-21 11:35:05.985144722 -0500 EST m=+0.176504005 container died ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay-137680d648b1dab7174afee90d88f8b5548c43d4727dbb71210aabd77d479474-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-137680d648b1dab7174afee90d88f8b5548c43d4727dbb71210aabd77d479474-merged.mount has successfully entered the 'dead' state. Dec 21 11:35:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay-afa5f6ddebe86f0f648c666f83a01fb4f59823f274266ec0ebbb1c941a8a3b93-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-afa5f6ddebe86f0f648c666f83a01fb4f59823f274266ec0ebbb1c941a8a3b93-merged.mount has successfully entered the 'dead' state. Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.040692485 -0500 EST m=+0.232051483 container cleanup a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.053870025 -0500 EST m=+0.245229302 container cleanup ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.062490527 -0500 EST m=+0.253850402 container cleanup d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:06 managed-node2 systemd[1]: Removed slice machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice - cgroup machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice. 
░░ Subject: A stop job for unit machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice has finished. ░░ ░░ The job identifier is 3544 and the job result is done. Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.092958588 -0500 EST m=+0.284317600 container remove ab1ed70273076e1de0bfb5306194f08cce3c9aee5af296286eb7e8f0bd01dbb8 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.114810094 -0500 EST m=+0.306169103 container remove d62af06fe80a7ce9e88ea311b2b6715b83549919bfb828573488c6dd1851f008 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.142849122 -0500 EST m=+0.334208126 container remove a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba (image=localhost/podman-pause:5.3.1-1733097600, name=a20a2c426a00-infra, pod_id=a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 21 11:35:06 managed-node2 systemd[1]: machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice: Failed to open /run/systemd/transient/machine-libpod_pod_a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40.slice: No such file or directory Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.152499281 -0500 EST m=+0.343858282 pod remove a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 (image=, name=quadlet-demo) Dec 21 11:35:06 managed-node2 podman[45918]: 2024-12-21 11:35:06.18134077 -0500 EST m=+0.372699772 container remove 9f1e833e8645852665a22030536ca5c52cfec7decb76585438cf0051c8f4bf45 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 21 11:35:06 managed-node2 quadlet-demo[45918]: Pods stopped: Dec 21 11:35:06 managed-node2 quadlet-demo[45918]: a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 Dec 21 11:35:06 managed-node2 quadlet-demo[45918]: Pods removed: Dec 21 11:35:06 managed-node2 quadlet-demo[45918]: a20a2c426a00a1f74838432dd0ff8faaf4aa5e19add83428ac3159dd95dadc40 Dec 21 11:35:06 managed-node2 quadlet-demo[45918]: Secrets removed: Dec 21 11:35:06 managed-node2 quadlet-demo[45918]: Volumes removed: Dec 21 11:35:06 managed-node2 systemd[1]: quadlet-demo.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo.service has successfully entered the 'dead' state. Dec 21 11:35:06 managed-node2 systemd[1]: Stopped quadlet-demo.service. ░░ Subject: A stop job for unit quadlet-demo.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo.service has finished. ░░ ░░ The job identifier is 3542 and the job result is done. 
Dec 21 11:35:06 managed-node2 python3.12[46094]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay-d371fa9df21de61df301c4eb8aea3317e2342ce3c3bf08870c4935df52fc6af0-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-d371fa9df21de61df301c4eb8aea3317e2342ce3c3bf08870c4935df52fc6af0-merged.mount has successfully entered the 'dead' state. Dec 21 11:35:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a0a9c1950f0b57658663ef8cf7f88c8aed610a9f8597ed9521c2c45698fab2ba-userdata-shm.mount has successfully entered the 'dead' state. Dec 21 11:35:07 managed-node2 python3.12[46358]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:35:08 managed-node2 python3.12[46489]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:35:08 managed-node2 systemd[1]: Reload requested from client PID 46490 ('systemctl') (unit session-5.scope)... Dec 21 11:35:08 managed-node2 systemd[1]: Reloading... Dec 21 11:35:08 managed-node2 systemd[1]: Reloading finished in 225 ms. 
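Teardown mirrors the deployment in reverse: force-stop and disable the generated quadlet-demo.service (podman stops and removes the pod, its containers, and the infra container, as logged above), delete the quadlet file, daemon-reload so the generated unit disappears, then prune images and re-check the host, which is what the next entries show. A task-list sketch of that sequence, with the module arguments taken from the invocations in the journal:

# Reverse of the deploy pattern; sketch only.
- name: Stop and disable the generated service
  ansible.builtin.systemd:
    name: quadlet-demo.service
    state: stopped
    enabled: false
    force: true

- name: Remove the quadlet file
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo.kube
    state: absent

- name: Drop the generated unit
  ansible.builtin.systemd:
    daemon_reload: true

- name: Prune now-unused images
  ansible.builtin.command: podman image prune --all -f
  changed_when: true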
Dec 21 11:35:09 managed-node2 python3.12[46676]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:09 managed-node2 podman[46677]: 2024-12-21 11:35:09.600873335 -0500 EST m=+0.560213231 image untag fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Dec 21 11:35:09 managed-node2 podman[46677]: 2024-12-21 11:35:09.059683749 -0500 EST m=+0.019023774 image remove fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b Dec 21 11:35:09 managed-node2 podman[46677]: 2024-12-21 11:35:09.747720252 -0500 EST m=+0.707060083 image untag 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Dec 21 11:35:09 managed-node2 podman[46677]: 2024-12-21 11:35:09.600885392 -0500 EST m=+0.560225184 image remove 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d Dec 21 11:35:10 managed-node2 python3.12[46815]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:10 managed-node2 python3.12[46953]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:11 managed-node2 python3.12[47092]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:11 managed-node2 python3.12[47230]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:13 managed-node2 python3.12[47646]: ansible-service_facts Invoked Dec 21 11:35:13 managed-node2 podman[47652]: 2024-12-21 11:35:13.302621671 -0500 EST m=+0.072976883 container health_status 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=1, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:35:13 managed-node2 systemd[1]: 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Dec 21 11:35:13 managed-node2 systemd[1]: 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service: Failed with result 'exit-code'. 
░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.service has entered the 'failed' state with result 'exit-code'. Dec 21 11:35:16 managed-node2 python3.12[47893]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:17 managed-node2 python3.12[48026]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:18 managed-node2 python3.12[48290]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:35:19 managed-node2 python3.12[48421]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:35:19 managed-node2 systemd[1]: Reload requested from client PID 48422 ('systemctl') (unit session-5.scope)... Dec 21 11:35:19 managed-node2 systemd[1]: Reloading... Dec 21 11:35:19 managed-node2 systemd[1]: Reloading finished in 225 ms. Dec 21 11:35:20 managed-node2 podman[48609]: 2024-12-21 11:35:20.135366445 -0500 EST m=+0.025119006 volume remove envoy-proxy-config Dec 21 11:35:20 managed-node2 podman[48748]: 2024-12-21 11:35:20.546896289 -0500 EST m=+0.023636159 volume remove envoy-certificates Dec 21 11:35:21 managed-node2 podman[48886]: 2024-12-21 11:35:21.007371108 -0500 EST m=+0.059085324 volume remove wp-pv-claim Dec 21 11:35:21 managed-node2 python3.12[49024]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:22 managed-node2 python3.12[49163]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:22 managed-node2 python3.12[49302]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:23 managed-node2 python3.12[49441]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:23 managed-node2 python3.12[49581]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:25 managed-node2 python3.12[49996]: ansible-service_facts Invoked Dec 21 11:35:27 managed-node2 python3.12[50234]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:29 managed-node2 python3.12[50367]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:30 managed-node2 python3.12[50631]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:35:30 managed-node2 python3.12[50762]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:35:30 managed-node2 systemd[1]: Reload requested from client PID 50763 ('systemctl') (unit session-5.scope)... Dec 21 11:35:30 managed-node2 systemd[1]: Reloading... Dec 21 11:35:31 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 21 11:35:31 managed-node2 python3.12[50950]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:32 managed-node2 python3.12[51089]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:33 managed-node2 python3.12[51227]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:33 managed-node2 python3.12[51364]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:33 managed-node2 python3.12[51502]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:35 managed-node2 python3.12[51918]: ansible-service_facts Invoked Dec 21 11:35:39 managed-node2 python3.12[52156]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:40 managed-node2 python3.12[52289]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Dec 21 11:35:40 managed-node2 systemd[1]: Reload requested from client PID 52292 ('systemctl') (unit session-5.scope)... Dec 21 11:35:40 managed-node2 systemd[1]: Reloading... Dec 21 11:35:41 managed-node2 systemd[1]: Reloading finished in 216 ms. Dec 21 11:35:41 managed-node2 systemd[1]: Stopping quadlet-demo-mysql.service... 
░░ Subject: A stop job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 3623. Dec 21 11:35:42 managed-node2 podman[52350]: 2024-12-21 11:35:42.94871072 -0500 EST m=+1.773741995 container died 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:35:42 managed-node2 systemd[1]: 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer has successfully entered the 'dead' state. Dec 21 11:35:42 managed-node2 systemd[1]: Stopped 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer - /usr/bin/podman healthcheck run 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474. ░░ Subject: A stop job for unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-503f4df31c37e8b6.timer has finished. ░░ ░░ The job identifier is 3624 and the job result is done. Dec 21 11:35:42 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Dec 21 11:35:42 managed-node2 kernel: veth2 (unregistering): left allmulticast mode Dec 21 11:35:42 managed-node2 kernel: veth2 (unregistering): left promiscuous mode Dec 21 11:35:42 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Dec 21 11:35:43 managed-node2 NetworkManager[775]: [1734798943.0022] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Dec 21 11:35:43 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3627. Dec 21 11:35:43 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3627. Dec 21 11:35:43 managed-node2 systemd[1]: run-netns-netns\x2d8f4d5636\x2d7555\x2dcb6b\x2dc021\x2dc8802fa45565.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d8f4d5636\x2d7555\x2dcb6b\x2dc021\x2dc8802fa45565.mount has successfully entered the 'dead' state. Dec 21 11:35:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-userdata-shm.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474-userdata-shm.mount has successfully entered the 'dead' state. Dec 21 11:35:43 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0e9d761c7a89da80c7d060693f55e11575e81bea8267e3b388ce950b6c44be23-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-0e9d761c7a89da80c7d060693f55e11575e81bea8267e3b388ce950b6c44be23-merged.mount has successfully entered the 'dead' state. Dec 21 11:35:43 managed-node2 podman[52350]: 2024-12-21 11:35:43.121829451 -0500 EST m=+1.946860596 container remove 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 21 11:35:43 managed-node2 quadlet-demo-mysql[52350]: 203d365a6d084b3bdf58955b1edc347bdf9a1b3924c98add11459ee5b5619474 Dec 21 11:35:43 managed-node2 systemd[1]: quadlet-demo-mysql.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has successfully entered the 'dead' state. Dec 21 11:35:43 managed-node2 systemd[1]: Stopped quadlet-demo-mysql.service. ░░ Subject: A stop job for unit quadlet-demo-mysql.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql.service has finished. ░░ ░░ The job identifier is 3623 and the job result is done. Dec 21 11:35:43 managed-node2 python3.12[52527]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:44 managed-node2 python3.12[52791]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:35:45 managed-node2 python3.12[52922]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:35:45 managed-node2 systemd[1]: Reload requested from client PID 52923 ('systemctl') (unit session-5.scope)... Dec 21 11:35:45 managed-node2 systemd[1]: Reloading... Dec 21 11:35:45 managed-node2 systemd[1]: Reloading finished in 218 ms. 
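The sequence above is the per-unit teardown for quadlet-demo-mysql: stop and disable the generated service, then remove the .container quadlet file and reload systemd. A sketch of the stop/disable step, assuming an illustrative task name and using the module parameters visible in the journal:

# Sketch only: parameters copied from the ansible-systemd entry logged at 11:35:40.
- name: Stop and disable the generated quadlet service
  ansible.builtin.systemd:
    name: quadlet-demo-mysql.service
    scope: system
    state: stopped
    enabled: false
    force: true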
Dec 21 11:35:46 managed-node2 python3.12[53249]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:47 managed-node2 podman[53250]: 2024-12-21 11:35:47.032972497 -0500 EST m=+0.243757942 image untag dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Dec 21 11:35:47 managed-node2 podman[53250]: 2024-12-21 11:35:46.806619928 -0500 EST m=+0.017405378 image remove dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 Dec 21 11:35:47 managed-node2 python3.12[53389]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:48 managed-node2 python3.12[53528]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:48 managed-node2 python3.12[53666]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:48 managed-node2 python3.12[53805]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:50 managed-node2 python3.12[54221]: ansible-service_facts Invoked Dec 21 11:35:52 managed-node2 python3.12[54458]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:53 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Dec 21 11:35:54 managed-node2 python3.12[54592]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Dec 21 11:35:54 managed-node2 systemd[1]: Reload requested from client PID 54595 ('systemctl') (unit session-5.scope)... Dec 21 11:35:54 managed-node2 systemd[1]: Reloading... Dec 21 11:35:54 managed-node2 systemd[1]: Reloading finished in 215 ms. Dec 21 11:35:54 managed-node2 systemd[1]: quadlet-demo-mysql-volume.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql-volume.service has successfully entered the 'dead' state. Dec 21 11:35:54 managed-node2 systemd[1]: Stopped quadlet-demo-mysql-volume.service. ░░ Subject: A stop job for unit quadlet-demo-mysql-volume.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql-volume.service has finished. ░░ ░░ The job identifier is 3706 and the job result is done. 
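After each unit file is removed, the journal shows the same verification pass: podman image prune --all -f followed by listings of images, volumes, containers, and networks. A hedged sketch of those invocations as tasks (the grouping and task names are illustrative; the commands themselves are taken from the ansible-ansible.legacy.command entries above):

# Sketch only: commands match the logged invocations.
- name: Prune images no longer in use
  ansible.builtin.command:
    cmd: podman image prune --all -f

- name: List what podman still knows about
  ansible.builtin.command:
    cmd: "{{ item }}"
  loop:
    - podman images -n
    - podman volume ls -n
    - podman ps --noheading
    - podman network ls -n -q
  changed_when: false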
Dec 21 11:35:54 managed-node2 python3.12[54784]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:35:55 managed-node2 python3.12[55048]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:35:56 managed-node2 python3.12[55179]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:35:56 managed-node2 systemd[1]: Reload requested from client PID 55180 ('systemctl') (unit session-5.scope)... Dec 21 11:35:56 managed-node2 systemd[1]: Reloading... Dec 21 11:35:56 managed-node2 systemd[1]: Reloading finished in 214 ms. Dec 21 11:35:56 managed-node2 podman[55367]: 2024-12-21 11:35:56.990727077 -0500 EST m=+0.029490672 volume remove systemd-quadlet-demo-mysql Dec 21 11:35:57 managed-node2 python3.12[55505]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:58 managed-node2 python3.12[55644]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:58 managed-node2 python3.12[55782]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:59 managed-node2 python3.12[55920]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:35:59 managed-node2 python3.12[56058]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:36:00 managed-node2 python3.12[56475]: ansible-service_facts Invoked Dec 21 11:36:03 managed-node2 python3.12[56713]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:36:05 managed-node2 python3.12[56846]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Dec 21 11:36:05 managed-node2 systemd[1]: Reload requested from client PID 56849 ('systemctl') (unit session-5.scope)... Dec 21 11:36:05 managed-node2 systemd[1]: Reloading... Dec 21 11:36:05 managed-node2 systemd[1]: Reloading finished in 215 ms. 
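Removing quadlet-demo-mysql.volume produces the "volume remove systemd-quadlet-demo-mysql" entry above: quadlet prefixes the volumes it generates with systemd- (the name can be overridden in the .volume unit). An illustrative follow-up check, assuming the register/failed_when logic, which is not part of the logged run:

# Sketch only: "podman volume ls -n" is taken from the log; the assertion is illustrative.
- name: Verify the quadlet-generated volume is gone
  ansible.builtin.command:
    cmd: podman volume ls -n
  register: __volume_list
  changed_when: false
  failed_when: "'systemd-quadlet-demo-mysql' in __volume_list.stdout"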
Dec 21 11:36:05 managed-node2 systemd[1]: quadlet-demo-network.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-network.service has successfully entered the 'dead' state. Dec 21 11:36:05 managed-node2 systemd[1]: Stopped quadlet-demo-network.service. ░░ Subject: A stop job for unit quadlet-demo-network.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-network.service has finished. ░░ ░░ The job identifier is 3707 and the job result is done. Dec 21 11:36:05 managed-node2 python3.12[57037]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 21 11:36:06 managed-node2 python3.12[57301]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 21 11:36:07 managed-node2 python3.12[57432]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 21 11:36:07 managed-node2 systemd[1]: Reload requested from client PID 57433 ('systemctl') (unit session-5.scope)... Dec 21 11:36:07 managed-node2 systemd[1]: Reloading... Dec 21 11:36:07 managed-node2 systemd[1]: Reloading finished in 213 ms. Dec 21 11:36:08 managed-node2 python3.12[57758]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:36:09 managed-node2 python3.12[57896]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:36:09 managed-node2 python3.12[58034]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:36:10 managed-node2 python3.12[58172]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:36:10 managed-node2 python3.12[58311]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 21 11:36:11 managed-node2 python3.12[58726]: ansible-service_facts Invoked Dec 21 11:36:14 managed-node2 python3.12[58964]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units 
--plain --failed -l --all | grep quadlet || : _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 21 11:36:15 managed-node2 python3.12[59102]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node2 : ok=397 changed=47 unreachable=0 failed=2 skipped=376 rescued=2 ignored=0

TASKS RECAP ********************************************************************
Saturday 21 December 2024 11:36:15 -0500 (0:00:00.439) 0:03:24.290 *****
===============================================================================
Check web -------------------------------------------------------------- 33.44s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.podman : Ensure container images are present -- 17.30s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 6.39s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.18s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 2.99s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.88s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.66s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.28s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.14s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.13s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.10s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.03s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Gathering Facts --------------------------------------------------------- 1.49s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.39s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.38s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.22s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.21s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.podman : Start service ------------------------ 1.19s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Prune images no longer in use -------- 1.15s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.12s
/tmp/collections-wP6/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
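The "For testing and debugging - services" entries that dominate the timing recap correspond to the ansible-service_facts invocations seen throughout the journal above. A sketch of that check; the debug filter is illustrative and not taken from the log:

# Sketch only: service_facts matches the logged ansible-service_facts calls; the debug step is illustrative.
- name: For testing and debugging - services
  ansible.builtin.service_facts:

- name: Show any quadlet-related units still registered with systemd (illustrative)
  ansible.builtin.debug:
    msg: "{{ ansible_facts.services | dict2items | selectattr('key', 'search', 'quadlet') | list }}"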