ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-Zud
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Saturday 28 December 2024 11:33:09 -0500 (0:00:00.007) 0:00:00.007 *****
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-ggq/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Deploy the quadlet demo app] *********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Saturday 28 December 2024 11:33:09 -0500 (0:00:00.022) 0:00:00.030 *****
[WARNING]: Platform linux on host managed-node2 is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node2]
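The two vaulted values above come in through an ordinary include_vars task pointed at the file listed in ansible_included_var_files. A minimal sketch of that pattern, assuming the same file layout the log reports (this play is illustrative, reconstructed from the log, not copied from the actual test source):

    - name: Include vault variables
      hosts: all
      gather_facts: false
      tasks:
        - name: Include vault variables
          ansible.builtin.include_vars:
            file: /tmp/podman-ggq/tests/vars/vault-variables.yml

The ciphertext stays encrypted on disk and is only decrypted in memory with the vault password (for example via --vault-password-file), which is why the log records $ANSIBLE_VAULT payloads rather than plaintext.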
TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38
Saturday 28 December 2024 11:33:11 -0500 (0:00:01.465) 0:00:01.495 *****
skipping: [managed-node2] => {
    "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""
}

TASK [End test] ****************************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45
Saturday 28 December 2024 11:33:11 -0500 (0:00:00.022) 0:00:01.518 *****
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node2] => {
    "skip_reason": "end_play conditional evaluated to False, continuing play"
}
MSG: end_play

TASK [Generate certificates] ***************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51
Saturday 28 December 2024 11:33:11 -0500 (0:00:00.014) 0:00:01.533 *****
included: fedora.linux_system_roles.certificate for managed-node2

TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Saturday 28 December 2024 11:33:11 -0500 (0:00:00.051) 0:00:01.585 *****
included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Saturday 28 December 2024 11:33:11 -0500 (0:00:00.022) 0:00:01.608 *****
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Saturday 28 December 2024 11:33:11 -0500 (0:00:00.037) 0:00:01.645 *****
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Saturday 28 December 2024 11:33:11 -0500 (0:00:00.435) 0:00:02.081 *****
ok: [managed-node2] => {
    "ansible_facts": {
        "__certificate_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19
Saturday 28 December 2024 11:33:11 -0500 (0:00:00.025) 0:00:02.106 *****
skipping: [managed-node2] => (item=RedHat.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=CentOS.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": {
"__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Saturday 28 December 2024 11:33:11 -0500 (0:00:00.042) 0:00:02.148 ***** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-cffi-1.16.0-7.el10.x86_64", "Installed: python3-pyasn1-0.6.1-1.el10.noarch", "Installed: python3-cryptography-43.0.0-4.el10.x86_64", "Installed: python3-ply-3.11-25.el10.noarch", "Installed: python3-pycparser-2.20-16.el10.noarch" ] } lsrpackages: python3-cryptography python3-dbus python3-pyasn1 TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 Saturday 28 December 2024 11:33:14 -0500 (0:00:02.296) 0:00:04.445 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: nss-sysinit-3.101.0-13.el10.x86_64", "Installed: nss-util-3.101.0-13.el10.x86_64", "Installed: certmonger-0.79.20-3.el10.x86_64", "Installed: python3-packaging-23.2-6.el10.noarch", "Installed: dbus-tools-1:1.14.10-5.el10.x86_64", "Installed: nspr-4.35.0-34.el10.x86_64", "Installed: nss-3.101.0-13.el10.x86_64", "Installed: nss-softokn-3.101.0-13.el10.x86_64", "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64" ] } lsrpackages: certmonger python3-packaging TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35 Saturday 28 December 2024 11:33:17 -0500 (0:00:03.181) 0:00:07.626 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61 Saturday 28 December 2024 11:33:17 -0500 (0:00:00.535) 0:00:08.162 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure 
provider service is running] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 Saturday 28 December 2024 11:33:18 -0500 (0:00:00.500) 0:00:08.663 ***** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "syslog.target basic.target dbus-broker.service dbus.socket system.slice systemd-journald.socket sysinit.target network.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": 
"none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3082366976", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/certmonger.pid", "PartOf": "dbus-broker.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", 
"ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 Saturday 28 December 2024 11:33:19 -0500 (0:00:01.376) 0:00:10.039 ***** changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate requested (new). 
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 Saturday 28 December 2024 11:33:20 -0500 (0:00:00.966) 0:00:11.005 ***** ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRQ0o0VEFlbTZSb200U0FYNk9XQWtlREFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTURnNQpaVEV6TURFdFpUbGlZVFEyT0RrdFlqZzBPREExWm1FdE16azJNREkwTnpjd0hoY05NalF4TWpJNE1UWXpNekl3CldoY05NalV4TWpJNE1UWXpNekU1V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ3A3T0tKTGJkUlNiN05CUWJIdE5uRGRVTzY4aHE1RHJQago5TFVDZnd4SUZ4Z1grWFl5ZkVaU3JPNDYyS2tQZzVvWkpzTjFNZ1VwZjE3T3JNSHd1LzNqWTJlaVNUWnliRXBmCmV4ZU8weWFYQnl2V0c2Nlp6M3pJSkJCdzJyT3BQZ0xFTU5jOTM0YnZWdlYxb1RMRm5iSWtwMUYwcDhJUmpvNVEKZWtrTURWditWaWlnc1pJRTVYZjNDbCt3YjZsckl4SVBQSmRsbCtBRzByTHN0UFkxWTlBdVJhUmpKTjh5QmVjRwpkcTFEa0xMcy83bUJ5b2dORmwyb21IVVRNdmVCQXhxSWJDSDN3ZU5mRmxYSVNEMHdNOWhYKzlnRU5vQ2crWmhSCm15TmorcDM0ZzAzZ3VTb0szQkRweDNLZjg4TERxdSt3bGI5UjRZcWdwT2JtOWhiaG8ra1BBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVxWmt3CkVTNzlkNHBkL05sTWhmd2Y2aDdCOWdNd0h3WURWUjBqQkJnd0ZvQVVWOEFORE1UUEpkUzl6TmF1WStoK0NpY00KV0hVd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFCSElBcHJoQTh0Y1JFRmxuaEZqNm1XcVYyUEMvU0F1N3hPbwpzSWFjczdsZzIwV2tjZkovVzVjSFpubDdndk1tR0UvNDg2ODZmaWF6T1UzRElWb2FYbUtmQjN6NWVnaHN4M0NuCkxqY2p5SG9OS1Z5YkthU3BvbTNEcnRjOTlXMTN2aERMY0N2OXRhaEUrUStxSG9lMk5uSllyZUJzWmVEZ1hpVDAKMWsra3VOTUJVaCtzMEJzS21XT1Nmb1pTdUZSN0MxZjE4VngrRXBCOWNkVE1DeE9MelFWNmdFWGZBM2g4M1ZFZwpCVDFHRlQ0RTB4RHA4WUZSVE0xKzVuNDVGYmZOSFcvYytBalV2MHI1QWwzUWlpZFMwVms1dXFhRUlzNTVWc1c0Ci8xMGdldmZuUmpXTGFLQ2l3OENsRmdWK0dZeW0xV2h5Sk10TGV1U09UTUZ1Z0lkOVRWYz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2QUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktZd2dnU2lBZ0VBQW9JQkFRQ3A3T0tKTGJkUlNiN04KQlFiSHRObkRkVU82OGhxNURyUGo5TFVDZnd4SUZ4Z1grWFl5ZkVaU3JPNDYyS2tQZzVvWkpzTjFNZ1VwZjE3TwpyTUh3dS8zalkyZWlTVFp5YkVwZmV4ZU8weWFYQnl2V0c2Nlp6M3pJSkJCdzJyT3BQZ0xFTU5jOTM0YnZWdlYxCm9UTEZuYklrcDFGMHA4SVJqbzVRZWtrTURWditWaWlnc1pJRTVYZjNDbCt3YjZsckl4SVBQSmRsbCtBRzByTHMKdFBZMVk5QXVSYVJqSk44eUJlY0dkcTFEa0xMcy83bUJ5b2dORmwyb21IVVRNdmVCQXhxSWJDSDN3ZU5mRmxYSQpTRDB3TTloWCs5Z0VOb0NnK1poUm15TmorcDM0ZzAzZ3VTb0szQkRweDNLZjg4TERxdSt3bGI5UjRZcWdwT2JtCjloYmhvK2tQQWdNQkFBRUNnZ0VBREgzL3dYbjg2aHJ6emZxNm50L3F5blhLZnVuaGtRMUZxRktMSkIzVzkwM0wKVmFmZlNxTGNRS081OTlXSnlEd3lqZnJOajhZWG53OWdGdTJZYkd4Rm8vdmcySXR0VjBnaXg1WXpDeXIyd1UragpwMS90bkIwbTNVZlpkWkorZHNpNlIxWVgzTktjQnVocnJ6UWttV2FZZkIxWTNheWZsYzdYSTQ5NTQwcndJbWh3CmFmUm1Na3RWMmNsWkVhSE90OFhFUVorRm9nWjlRK05SMGJSOVlodGpVOTN2S0x6OVJMOEZXREpiMW93dlBFb0wKdUsyelYwWGE1U2VMTDlyT29lZGI5MVh3ZWRuS29MRVlqUDEwaFE3bWNQK1JFT29zOTRnTzhxTmlPREZLcDF3cgp2eHRRUkpNWFBuU2s4eFM4RHdBWW55MDJJWTVlWER6K3Y1L3RkdGJLSVFLQmdRRGNOU2dPYjJFL0lXNVdSL0NqCnpyeUltWitSNHgxTzZGWkQyMmcyM0hpczdIRmZtQjBCcmd2RkpOeWY4N3lsZE5La3VEdnRaajlGcTVXajF3QUkKWlozNVJqWFBKekNYZDFLbzBmVmVOaiswMG4wU1BPMkN0eEpnd3crZG1xVzBwUWpXa3pCZTg5dlNBNTZUeHBybQp0aTM1Q3lqV0hTUjk3aC9UT0VBdkRLUEs1d0tCZ1FERmkzc05zaUNrR3d1U3A1S3B0aGQ0enNpOHZYTjhEdFZsClY3MDZSSlNQS0s3T3pMa2V4enE5RXdoQUQxS0Y4ODNkUnVDL2xORTljb1V3c2lnc3o2QlVCSlRwZzUxOTdwU0kKWnNGUFJrajNvMXNyV0dHbkFIdmNWM3NCWmhwclFlWjdZU003ODl3Ni9jSFNiaU9xT2h6UGxBR2Q3V2Z1NlBhZwpCNThBQlR1VG1RS0JnSEtnelJndWpIMnpabDlObFl0L3U0NlVBWWVNcFI0RlBacVhKUG1oN01OL1ZVa0F3dUN6CisvYUV5TUFPdmIyWFFXdHpaK2FkV2dSblhEN3NWVjBKb0tFNUZodWVkK0h3d0R1WEI5WWlBU21jaEhTdytLWVoKckZ4b1RjR3AxZVppSG1hc09mckRrRU1UbFVHeG1jTE5STU1tTnZ5Mk1OL0l6QkpFYnN6UEZKZ3pBb0dBWUdDcQppb1lMVXNVM0lqZkxZQzFNeWszb3RkeWdTVGhiZUhRVUNDZUphMGJuOGxFRFNEYmZPM3ZibkpWZEo2bEpHTm9KCnNLNkVYenhPS3ZIZ2sreEFSbjF0d1hmTEVMT2psK0M2RTFreEpGQjRuRzhrUlh3QThnRmtLOHlLdXlXTnMwZ3oKRG43dFJZS2tXMndNem5KSHVDWjNtM3RHSjlOVTc4S0lHbFJ6T3BFQ2dZQkluK1NQMGZkaks0RHFwRVhTWUwyWgpOWmltVDZGZVRmZEFXY1duQXZ5THZGQXpLWWRzNzRYcTFkRnd0UFpjb3M5UjNNN3QwQWRxOWFCU3N1bU5URVQ2Cjh6N2F4M2tPbHVSajRhMm9EQlpDeGhwU1lMNHZyUWMzWUI1dlZtRTMxY0VoOWhKQzh1YVA1MVJyVDNaMlEyL20KQlZsUHhMcFlBeDFJRHNGSTBKZ3ZwQT09Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRQ0o0VEFlbTZSb200U0FYNk9XQWtlREFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTURnNQpaVEV6TURFdFpUbGlZVFEyT0RrdFlqZzBPREExWm1FdE16azJNREkwTnpjd0hoY05NalF4TWpJNE1UWXpNekl3CldoY05NalV4TWpJNE1UWXpNekU1V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ3A3T0tKTGJkUlNiN05CUWJIdE5uRGRVTzY4aHE1RHJQago5TFVDZnd4SUZ4Z1grWFl5ZkVaU3JPNDYyS2tQZzVvWkpzTjFNZ1VwZjE3T3JNSHd1LzNqWTJlaVNUWnliRXBmCmV4ZU8weWFYQnl2V0c2Nlp6M3pJSkJCdzJyT3BQZ0xFTU5jOTM0YnZWdlYxb1RMRm5iSWtwMUYwcDhJUmpvNVEKZWtrTURWditWaWlnc1pJRTVYZjNDbCt3YjZsckl4SVBQSmRsbCtBRzByTHN0UFkxWTlBdVJhUmpKTjh5QmVjRwpkcTFEa0xMcy83bUJ5b2dORmwyb21IVVRNdmVCQXhxSWJDSDN3ZU5mRmxYSVNEMHdNOWhYKzlnRU5vQ2crWmhSCm15TmorcDM0ZzAzZ3VTb0szQkRweDNLZjg4TERxdSt3bGI5UjRZcWdwT2JtOWhiaG8ra1BBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVxWmt3CkVTNzlkNHBkL05sTWhmd2Y2aDdCOWdNd0h3WURWUjBqQkJnd0ZvQVVWOEFORE1UUEpkUzl6TmF1WStoK0NpY00KV0hVd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFCSElBcHJoQTh0Y1JFRmxuaEZqNm1XcVYyUEMvU0F1N3hPbwpzSWFjczdsZzIwV2tjZkovVzVjSFpubDdndk1tR0UvNDg2ODZmaWF6T1UzRElWb2FYbUtmQjN6NWVnaHN4M0NuCkxqY2p5SG9OS1Z5YkthU3BvbTNEcnRjOTlXMTN2aERMY0N2OXRhaEUrUStxSG9lMk5uSllyZUJzWmVEZ1hpVDAKMWsra3VOTUJVaCtzMEJzS21XT1Nmb1pTdUZSN0MxZjE4VngrRXBCOWNkVE1DeE9MelFWNmdFWGZBM2g4M1ZFZwpCVDFHRlQ0RTB4RHA4WUZSVE0xKzVuNDVGYmZOSFcvYytBalV2MHI1QWwzUWlpZFMwVms1dXFhRUlzNTVWc1c0Ci8xMGdldmZuUmpXTGFLQ2l3OENsRmdWK0dZeW0xV2h5Sk10TGV1U09UTUZ1Z0lkOVRWYz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160 Saturday 28 December 2024 11:33:22 -0500 (0:00:01.309) 0:00:12.315 ***** ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCJ4TAem6Rom4SAX6OWAkeDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDg5\nZTEzMDEtZTliYTQ2ODktYjg0ODA1ZmEtMzk2MDI0NzcwHhcNMjQxMjI4MTYzMzIw\nWhcNMjUxMjI4MTYzMzE5WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp7OKJLbdRSb7NBQbHtNnDdUO68hq5DrPj\n9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17OrMHwu/3jY2eiSTZybEpf\nexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1oTLFnbIkp1F0p8IRjo5Q\nekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLstPY1Y9AuRaRjJN8yBecG\ndq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXISD0wM9hX+9gENoCg+ZhR\nmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm9hbho+kPAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUqZkw\nES79d4pd/NlMhfwf6h7B9gMwHwYDVR0jBBgwFoAUV8ANDMTPJdS9zNauY+h+CicM\nWHUwDQYJKoZIhvcNAQELBQADggEBABHIAprhA8tcREFlnhFj6mWqV2PC/SAu7xOo\nsIacs7lg20WkcfJ/W5cHZnl7gvMmGE/48686fiazOU3DIVoaXmKfB3z5eghsx3Cn\nLjcjyHoNKVybKaSpom3Drtc99W13vhDLcCv9tahE+Q+qHoe2NnJYreBsZeDgXiT0\n1k+kuNMBUh+s0BsKmWOSfoZSuFR7C1f18Vx+EpB9cdTMCxOLzQV6gEXfA3h83VEg\nBT1GFT4E0xDp8YFRTM1+5n45FbfNHW/c+AjUv0r5Al3QiidS0Vk5uqaEIs55VsW4\n/10gevfnRjWLaKCiw8ClFgV+GYym1WhyJMtLeuSOTMFugId9TVc=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCJ4TAem6Rom4SAX6OWAkeDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDg5\nZTEzMDEtZTliYTQ2ODktYjg0ODA1ZmEtMzk2MDI0NzcwHhcNMjQxMjI4MTYzMzIw\nWhcNMjUxMjI4MTYzMzE5WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp7OKJLbdRSb7NBQbHtNnDdUO68hq5DrPj\n9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17OrMHwu/3jY2eiSTZybEpf\nexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1oTLFnbIkp1F0p8IRjo5Q\nekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLstPY1Y9AuRaRjJN8yBecG\ndq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXISD0wM9hX+9gENoCg+ZhR\nmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm9hbho+kPAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUqZkw\nES79d4pd/NlMhfwf6h7B9gMwHwYDVR0jBBgwFoAUV8ANDMTPJdS9zNauY+h+CicM\nWHUwDQYJKoZIhvcNAQELBQADggEBABHIAprhA8tcREFlnhFj6mWqV2PC/SAu7xOo\nsIacs7lg20WkcfJ/W5cHZnl7gvMmGE/48686fiazOU3DIVoaXmKfB3z5eghsx3Cn\nLjcjyHoNKVybKaSpom3Drtc99W13vhDLcCv9tahE+Q+qHoe2NnJYreBsZeDgXiT0\n1k+kuNMBUh+s0BsKmWOSfoZSuFR7C1f18Vx+EpB9cdTMCxOLzQV6gEXfA3h83VEg\nBT1GFT4E0xDp8YFRTM1+5n45FbfNHW/c+AjUv0r5Al3QiidS0Vk5uqaEIs55VsW4\n/10gevfnRjWLaKCiw8ClFgV+GYym1WhyJMtLeuSOTMFugId9TVc=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCp7OKJLbdRSb7N\nBQbHtNnDdUO68hq5DrPj9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17O\nrMHwu/3jY2eiSTZybEpfexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1\noTLFnbIkp1F0p8IRjo5QekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLs\ntPY1Y9AuRaRjJN8yBecGdq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXI\nSD0wM9hX+9gENoCg+ZhRmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm\n9hbho+kPAgMBAAECggEADH3/wXn86hrzzfq6nt/qynXKfunhkQ1FqFKLJB3W903L\nVaffSqLcQKO599WJyDwyjfrNj8YXnw9gFu2YbGxFo/vg2IttV0gix5YzCyr2wU+j\np1/tnB0m3UfZdZJ+dsi6R1YX3NKcBuhrrzQkmWaYfB1Y3ayflc7XI49540rwImhw\nafRmMktV2clZEaHOt8XEQZ+FogZ9Q+NR0bR9YhtjU93vKLz9RL8FWDJb1owvPEoL\nuK2zV0Xa5SeLL9rOoedb91XwednKoLEYjP10hQ7mcP+REOos94gO8qNiODFKp1wr\nvxtQRJMXPnSk8xS8DwAYny02IY5eXDz+v5/tdtbKIQKBgQDcNSgOb2E/IW5WR/Cj\nzryImZ+R4x1O6FZD22g23His7HFfmB0BrgvFJNyf87yldNKkuDvtZj9Fq5Wj1wAI\nZZ35RjXPJzCXd1Ko0fVeNj+00n0SPO2CtxJgww+dmqW0pQjWkzBe89vSA56Txprm\nti35CyjWHSR97h/TOEAvDKPK5wKBgQDFi3sNsiCkGwuSp5Kpthd4zsi8vXN8DtVl\nV706RJSPKK7OzLkexzq9EwhAD1KF883dRuC/lNE9coUwsigsz6BUBJTpg5197pSI\nZsFPRkj3o1srWGGnAHvcV3sBZhprQeZ7YSM789w6/cHSbiOqOhzPlAGd7Wfu6Pag\nB58ABTuTmQKBgHKgzRgujH2zZl9NlYt/u46UAYeMpR4FPZqXJPmh7MN/VUkAwuCz\n+/aEyMAOvb2XQWtzZ+adWgRnXD7sVV0JoKE5Fhued+HwwDuXB9YiASmchHSw+KYZ\nrFxoTcGp1eZiHmasOfrDkEMTlUGxmcLNRMMmNvy2MN/IzBJEbszPFJgzAoGAYGCq\nioYLUsU3IjfLYC1Myk3otdygSThbeHQUCCeJa0bn8lEDSDbfO3vbnJVdJ6lJGNoJ\nsK6EXzxOKvHgk+xARn1twXfLELOjl+C6E1kxJFB4nG8kRXwA8gFkK8yKuyWNs0gz\nDn7tRYKkW2wMznJHuCZ3m3tGJ9NU78KIGlRzOpECgYBIn+SP0fdjK4DqpEXSYL2Z\nNZimT6FeTfdAWcWnAvyLvFAzKYds74Xq1dFwtPZcos9R3M7t0Adq9aBSsumNTET6\n8z7ax3kOluRj4a2oDBZCxhpSYL4vrQc3YB5vVmE31cEh9hJC8uaP51RrT3Z2Q2/m\nBVlPxLpYAx1IDsFI0JgvpA==\n-----END PRIVATE KEY-----\n" } } }, "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176 Saturday 28 December 2024 11:33:22 -0500 (0:00:00.066) 0:00:12.381 ***** ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCJ4TAem6Rom4SAX6OWAkeDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDg5\nZTEzMDEtZTliYTQ2ODktYjg0ODA1ZmEtMzk2MDI0NzcwHhcNMjQxMjI4MTYzMzIw\nWhcNMjUxMjI4MTYzMzE5WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp7OKJLbdRSb7NBQbHtNnDdUO68hq5DrPj\n9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17OrMHwu/3jY2eiSTZybEpf\nexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1oTLFnbIkp1F0p8IRjo5Q\nekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLstPY1Y9AuRaRjJN8yBecG\ndq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXISD0wM9hX+9gENoCg+ZhR\nmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm9hbho+kPAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUqZkw\nES79d4pd/NlMhfwf6h7B9gMwHwYDVR0jBBgwFoAUV8ANDMTPJdS9zNauY+h+CicM\nWHUwDQYJKoZIhvcNAQELBQADggEBABHIAprhA8tcREFlnhFj6mWqV2PC/SAu7xOo\nsIacs7lg20WkcfJ/W5cHZnl7gvMmGE/48686fiazOU3DIVoaXmKfB3z5eghsx3Cn\nLjcjyHoNKVybKaSpom3Drtc99W13vhDLcCv9tahE+Q+qHoe2NnJYreBsZeDgXiT0\n1k+kuNMBUh+s0BsKmWOSfoZSuFR7C1f18Vx+EpB9cdTMCxOLzQV6gEXfA3h83VEg\nBT1GFT4E0xDp8YFRTM1+5n45FbfNHW/c+AjUv0r5Al3QiidS0Vk5uqaEIs55VsW4\n/10gevfnRjWLaKCiw8ClFgV+GYym1WhyJMtLeuSOTMFugId9TVc=\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCp7OKJLbdRSb7N\nBQbHtNnDdUO68hq5DrPj9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17O\nrMHwu/3jY2eiSTZybEpfexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1\noTLFnbIkp1F0p8IRjo5QekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLs\ntPY1Y9AuRaRjJN8yBecGdq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXI\nSD0wM9hX+9gENoCg+ZhRmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm\n9hbho+kPAgMBAAECggEADH3/wXn86hrzzfq6nt/qynXKfunhkQ1FqFKLJB3W903L\nVaffSqLcQKO599WJyDwyjfrNj8YXnw9gFu2YbGxFo/vg2IttV0gix5YzCyr2wU+j\np1/tnB0m3UfZdZJ+dsi6R1YX3NKcBuhrrzQkmWaYfB1Y3ayflc7XI49540rwImhw\nafRmMktV2clZEaHOt8XEQZ+FogZ9Q+NR0bR9YhtjU93vKLz9RL8FWDJb1owvPEoL\nuK2zV0Xa5SeLL9rOoedb91XwednKoLEYjP10hQ7mcP+REOos94gO8qNiODFKp1wr\nvxtQRJMXPnSk8xS8DwAYny02IY5eXDz+v5/tdtbKIQKBgQDcNSgOb2E/IW5WR/Cj\nzryImZ+R4x1O6FZD22g23His7HFfmB0BrgvFJNyf87yldNKkuDvtZj9Fq5Wj1wAI\nZZ35RjXPJzCXd1Ko0fVeNj+00n0SPO2CtxJgww+dmqW0pQjWkzBe89vSA56Txprm\nti35CyjWHSR97h/TOEAvDKPK5wKBgQDFi3sNsiCkGwuSp5Kpthd4zsi8vXN8DtVl\nV706RJSPKK7OzLkexzq9EwhAD1KF883dRuC/lNE9coUwsigsz6BUBJTpg5197pSI\nZsFPRkj3o1srWGGnAHvcV3sBZhprQeZ7YSM789w6/cHSbiOqOhzPlAGd7Wfu6Pag\nB58ABTuTmQKBgHKgzRgujH2zZl9NlYt/u46UAYeMpR4FPZqXJPmh7MN/VUkAwuCz\n+/aEyMAOvb2XQWtzZ+adWgRnXD7sVV0JoKE5Fhued+HwwDuXB9YiASmchHSw+KYZ\nrFxoTcGp1eZiHmasOfrDkEMTlUGxmcLNRMMmNvy2MN/IzBJEbszPFJgzAoGAYGCq\nioYLUsU3IjfLYC1Myk3otdygSThbeHQUCCeJa0bn8lEDSDbfO3vbnJVdJ6lJGNoJ\nsK6EXzxOKvHgk+xARn1twXfLELOjl+C6E1kxJFB4nG8kRXwA8gFkK8yKuyWNs0gz\nDn7tRYKkW2wMznJHuCZ3m3tGJ9NU78KIGlRzOpECgYBIn+SP0fdjK4DqpEXSYL2Z\nNZimT6FeTfdAWcWnAvyLvFAzKYds74Xq1dFwtPZcos9R3M7t0Adq9aBSsumNTET6\n8z7ax3kOluRj4a2oDBZCxhpSYL4vrQc3YB5vVmE31cEh9hJC8uaP51RrT3Z2Q2/m\nBVlPxLpYAx1IDsFI0JgvpA==\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCJ4TAem6Rom4SAX6OWAkeDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDg5\nZTEzMDEtZTliYTQ2ODktYjg0ODA1ZmEtMzk2MDI0NzcwHhcNMjQxMjI4MTYzMzIw\nWhcNMjUxMjI4MTYzMzE5WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp7OKJLbdRSb7NBQbHtNnDdUO68hq5DrPj\n9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17OrMHwu/3jY2eiSTZybEpf\nexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1oTLFnbIkp1F0p8IRjo5Q\nekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLstPY1Y9AuRaRjJN8yBecG\ndq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXISD0wM9hX+9gENoCg+ZhR\nmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm9hbho+kPAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUqZkw\nES79d4pd/NlMhfwf6h7B9gMwHwYDVR0jBBgwFoAUV8ANDMTPJdS9zNauY+h+CicM\nWHUwDQYJKoZIhvcNAQELBQADggEBABHIAprhA8tcREFlnhFj6mWqV2PC/SAu7xOo\nsIacs7lg20WkcfJ/W5cHZnl7gvMmGE/48686fiazOU3DIVoaXmKfB3z5eghsx3Cn\nLjcjyHoNKVybKaSpom3Drtc99W13vhDLcCv9tahE+Q+qHoe2NnJYreBsZeDgXiT0\n1k+kuNMBUh+s0BsKmWOSfoZSuFR7C1f18Vx+EpB9cdTMCxOLzQV6gEXfA3h83VEg\nBT1GFT4E0xDp8YFRTM1+5n45FbfNHW/c+AjUv0r5Al3QiidS0Vk5uqaEIs55VsW4\n/10gevfnRjWLaKCiw8ClFgV+GYym1WhyJMtLeuSOTMFugId9TVc=\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.027111", "end": "2024-12-28 11:33:22.729100", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCJ4TAem6Rom4SAX6OWAkeDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDg5\nZTEzMDEtZTliYTQ2ODktYjg0ODA1ZmEtMzk2MDI0NzcwHhcNMjQxMjI4MTYzMzIw\nWhcNMjUxMjI4MTYzMzE5WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp7OKJLbdRSb7NBQbHtNnDdUO68hq5DrPj\n9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17OrMHwu/3jY2eiSTZybEpf\nexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1oTLFnbIkp1F0p8IRjo5Q\nekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLstPY1Y9AuRaRjJN8yBecG\ndq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXISD0wM9hX+9gENoCg+ZhR\nmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm9hbho+kPAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUqZkw\nES79d4pd/NlMhfwf6h7B9gMwHwYDVR0jBBgwFoAUV8ANDMTPJdS9zNauY+h+CicM\nWHUwDQYJKoZIhvcNAQELBQADggEBABHIAprhA8tcREFlnhFj6mWqV2PC/SAu7xOo\nsIacs7lg20WkcfJ/W5cHZnl7gvMmGE/48686fiazOU3DIVoaXmKfB3z5eghsx3Cn\nLjcjyHoNKVybKaSpom3Drtc99W13vhDLcCv9tahE+Q+qHoe2NnJYreBsZeDgXiT0\n1k+kuNMBUh+s0BsKmWOSfoZSuFR7C1f18Vx+EpB9cdTMCxOLzQV6gEXfA3h83VEg\nBT1GFT4E0xDp8YFRTM1+5n45FbfNHW/c+AjUv0r5Al3QiidS0Vk5uqaEIs55VsW4\n/10gevfnRjWLaKCiw8ClFgV+GYym1WhyJMtLeuSOTMFugId9TVc=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCJ4TAem6Rom4SAX6OWAkeDANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDg5\nZTEzMDEtZTliYTQ2ODktYjg0ODA1ZmEtMzk2MDI0NzcwHhcNMjQxMjI4MTYzMzIw\nWhcNMjUxMjI4MTYzMzE5WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp7OKJLbdRSb7NBQbHtNnDdUO68hq5DrPj\n9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17OrMHwu/3jY2eiSTZybEpf\nexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1oTLFnbIkp1F0p8IRjo5Q\nekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLstPY1Y9AuRaRjJN8yBecG\ndq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXISD0wM9hX+9gENoCg+ZhR\nmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm9hbho+kPAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUqZkw\nES79d4pd/NlMhfwf6h7B9gMwHwYDVR0jBBgwFoAUV8ANDMTPJdS9zNauY+h+CicM\nWHUwDQYJKoZIhvcNAQELBQADggEBABHIAprhA8tcREFlnhFj6mWqV2PC/SAu7xOo\nsIacs7lg20WkcfJ/W5cHZnl7gvMmGE/48686fiazOU3DIVoaXmKfB3z5eghsx3Cn\nLjcjyHoNKVybKaSpom3Drtc99W13vhDLcCv9tahE+Q+qHoe2NnJYreBsZeDgXiT0\n1k+kuNMBUh+s0BsKmWOSfoZSuFR7C1f18Vx+EpB9cdTMCxOLzQV6gEXfA3h83VEg\nBT1GFT4E0xDp8YFRTM1+5n45FbfNHW/c+AjUv0r5Al3QiidS0Vk5uqaEIs55VsW4\n/10gevfnRjWLaKCiw8ClFgV+GYym1WhyJMtLeuSOTMFugId9TVc=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCp7OKJLbdRSb7N\nBQbHtNnDdUO68hq5DrPj9LUCfwxIFxgX+XYyfEZSrO462KkPg5oZJsN1MgUpf17O\nrMHwu/3jY2eiSTZybEpfexeO0yaXByvWG66Zz3zIJBBw2rOpPgLEMNc934bvVvV1\noTLFnbIkp1F0p8IRjo5QekkMDVv+ViigsZIE5Xf3Cl+wb6lrIxIPPJdll+AG0rLs\ntPY1Y9AuRaRjJN8yBecGdq1DkLLs/7mByogNFl2omHUTMveBAxqIbCH3weNfFlXI\nSD0wM9hX+9gENoCg+ZhRmyNj+p34g03guSoK3BDpx3Kf88LDqu+wlb9R4YqgpObm\n9hbho+kPAgMBAAECggEADH3/wXn86hrzzfq6nt/qynXKfunhkQ1FqFKLJB3W903L\nVaffSqLcQKO599WJyDwyjfrNj8YXnw9gFu2YbGxFo/vg2IttV0gix5YzCyr2wU+j\np1/tnB0m3UfZdZJ+dsi6R1YX3NKcBuhrrzQkmWaYfB1Y3ayflc7XI49540rwImhw\nafRmMktV2clZEaHOt8XEQZ+FogZ9Q+NR0bR9YhtjU93vKLz9RL8FWDJb1owvPEoL\nuK2zV0Xa5SeLL9rOoedb91XwednKoLEYjP10hQ7mcP+REOos94gO8qNiODFKp1wr\nvxtQRJMXPnSk8xS8DwAYny02IY5eXDz+v5/tdtbKIQKBgQDcNSgOb2E/IW5WR/Cj\nzryImZ+R4x1O6FZD22g23His7HFfmB0BrgvFJNyf87yldNKkuDvtZj9Fq5Wj1wAI\nZZ35RjXPJzCXd1Ko0fVeNj+00n0SPO2CtxJgww+dmqW0pQjWkzBe89vSA56Txprm\nti35CyjWHSR97h/TOEAvDKPK5wKBgQDFi3sNsiCkGwuSp5Kpthd4zsi8vXN8DtVl\nV706RJSPKK7OzLkexzq9EwhAD1KF883dRuC/lNE9coUwsigsz6BUBJTpg5197pSI\nZsFPRkj3o1srWGGnAHvcV3sBZhprQeZ7YSM789w6/cHSbiOqOhzPlAGd7Wfu6Pag\nB58ABTuTmQKBgHKgzRgujH2zZl9NlYt/u46UAYeMpR4FPZqXJPmh7MN/VUkAwuCz\n+/aEyMAOvb2XQWtzZ+adWgRnXD7sVV0JoKE5Fhued+HwwDuXB9YiASmchHSw+KYZ\nrFxoTcGp1eZiHmasOfrDkEMTlUGxmcLNRMMmNvy2MN/IzBJEbszPFJgzAoGAYGCq\nioYLUsU3IjfLYC1Myk3otdygSThbeHQUCCeJa0bn8lEDSDbfO3vbnJVdJ6lJGNoJ\nsK6EXzxOKvHgk+xARn1twXfLELOjl+C6E1kxJFB4nG8kRXwA8gFkK8yKuyWNs0gz\nDn7tRYKkW2wMznJHuCZ3m3tGJ9NU78KIGlRzOpECgYBIn+SP0fdjK4DqpEXSYL2Z\nNZimT6FeTfdAWcWnAvyLvFAzKYds74Xq1dFwtPZcos9R3M7t0Adq9aBSsumNTET6\n8z7ax3kOluRj4a2oDBZCxhpSYL4vrQc3YB5vVmE31cEh9hJC8uaP51RrT3Z2Q2/m\nBVlPxLpYAx1IDsFI0JgvpA==\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2024-12-28 11:33:22.701989" } STDOUT: Request "20241228163320" removed. 
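The stop-tracking step above is a plain getcert invocation run once per requested certificate. Expressed as a standalone task it would look roughly like this (a sketch of what the log's cmd and rc fields imply, not the role's actual task file):

    - name: Stop tracking certificates
      ansible.builtin.command:
        argv:
          - getcert
          - stop-tracking
          - -f
          - /etc/pki/tls/certs/quadlet_demo.crt
      changed_when: false

rc: 0 together with the 'Request "..." removed.' stdout confirms certmonger dropped the tracking request, after which the certificate and key files are deleted in the very next task.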
TASK [fedora.linux_system_roles.certificate : Remove files] ******************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 Saturday 28 December 2024 11:33:22 -0500 (0:00:00.661) 0:00:13.043 ***** changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" } ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } TASK [Run the role] ************************************************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62 Saturday 28 December 2024 11:33:24 -0500 (0:00:01.233) 0:00:14.276 ***** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 28 December 2024 11:33:24 -0500 (0:00:00.160) 0:00:14.437 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 28 December 2024 11:33:24 -0500 (0:00:00.045) 0:00:14.483 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 28 December 2024 11:33:24 -0500 (0:00:00.050) 0:00:14.533 ***** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 28 December 2024 11:33:24 -0500 (0:00:00.362) 0:00:14.895 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 28 December 2024 11:33:24 -0500 (0:00:00.033) 0:00:14.929 ***** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 28 December 2024 11:33:25 -0500 (0:00:00.375) 0:00:15.305 ***** ok: [managed-node2] => { "ansible_facts": { 
"__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 28 December 2024 11:33:25 -0500 (0:00:00.038) 0:00:15.343 ***** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 28 December 2024 11:33:25 -0500 (0:00:00.073) 0:00:15.416 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 28 December 2024 11:33:26 -0500 (0:00:01.186) 0:00:16.602 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 28 December 2024 11:33:26 -0500 (0:00:00.058) 0:00:16.661 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 28 December 2024 11:33:26 -0500 (0:00:00.050) 0:00:16.711 ***** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 28 December 2024 11:33:26 -0500 (0:00:00.056) 0:00:16.768 ***** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 28 December 2024 11:33:26 -0500 (0:00:00.048) 0:00:16.817 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 28 December 2024 11:33:26 -0500 (0:00:00.043) 0:00:16.860 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028711", "end": "2024-12-28 11:33:26.939235", "rc": 0, "start": "2024-12-28 11:33:26.910524" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.391) 0:00:17.252 ***** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.034) 0:00:17.287 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.029) 0:00:17.316 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.092) 0:00:17.409 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.146) 0:00:17.555 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.072) 0:00:17.627 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.055) 0:00:17.683 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:33:27 -0500 (0:00:00.066) 0:00:17.750 ***** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.521) 0:00:18.271 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.112) 0:00:18.385 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.081) 0:00:18.466 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.484) 0:00:18.951 ***** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.080) 0:00:19.031 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.068) 0:00:19.100 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.055) 0:00:19.156 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:33:28 -0500 (0:00:00.045) 0:00:19.201 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.066) 0:00:19.268 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.058) 0:00:19.327 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.049) 0:00:19.376 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.061) 0:00:19.438 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": 
"/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.086) 0:00:19.524 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.093) 0:00:19.617 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.157) 0:00:19.774 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.077) 0:00:19.852 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.102) 0:00:19.954 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.064) 0:00:20.019 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.048) 0:00:20.068 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.088) 0:00:20.157 ***** skipping: 
[managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 28 December 2024 11:33:29 -0500 (0:00:00.040) 0:00:20.198 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 28 December 2024 11:33:30 -0500 (0:00:00.050) 0:00:20.248 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 28 December 2024 11:33:30 -0500 (0:00:00.097) 0:00:20.346 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 28 December 2024 11:33:30 -0500 (0:00:00.037) 0:00:20.383 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 28 December 2024 11:33:30 -0500 (0:00:00.038) 0:00:20.421 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 28 December 2024 11:33:30 -0500 (0:00:00.035) 0:00:20.456 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 28 December 2024 11:33:30 -0500 (0:00:00.027) 0:00:20.484 ***** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 28 December 2024 11:33:30 -0500 (0:00:00.094) 0:00:20.579 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 28 December 2024 11:33:30 -0500 (0:00:00.068) 0:00:20.648 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 28 December 2024 11:33:30 -0500 (0:00:00.110) 0:00:20.758 *****
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 28 December 2024 11:33:30 -0500 (0:00:00.428) 0:00:21.187 *****
ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Saturday 28 December 2024 11:33:31 -0500 (0:00:00.101) 0:00:21.289 *****
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Saturday 28 December 2024 11:33:31 -0500 (0:00:00.418) 0:00:21.707 *****
ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false }

TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Saturday 28 December 2024 11:33:31 -0500 (0:00:00.034) 0:00:21.742 *****
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do
lsrpackages: firewalld

TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Saturday 28 December 2024 11:33:32 -0500 (0:00:00.777) 0:00:22.519 *****
skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" }

TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Saturday 28 December 2024 11:33:32 -0500 (0:00:00.034) 0:00:22.554 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Saturday 28 December 2024 11:33:32 -0500 (0:00:00.028) 0:00:22.583 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" }
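Before touching packages, the role first works out what kind of host it is on: rpm-ostree images cannot have packages installed mid-play, and transactional-update systems need a reboot before newly installed packages take effect. Both probes above came back "exists": false, so the plain package-manager path was taken ("Nothing to do", since firewalld was already installed) and the reboot bookkeeping tasks were skipped. A minimal sketch of that detection pattern; the marker path /run/ostree-booted is an assumption based on how such checks are conventionally done, not something shown in this output:

    # Sketch only: detect an rpm-ostree host and record the result as a fact.
    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted   # assumed marker file; not quoted from this log
      register: __ostree_booted

    - name: Set flag to indicate system is ostree
      ansible.builtin.set_fact:
        __firewall_is_ostree: "{{ __ostree_booted.stat.exists }}"

TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: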
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 28 December 2024 11:33:32 -0500 (0:00:00.033) 0:00:22.616 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 28 December 2024 11:33:32 -0500 (0:00:00.042) 0:00:22.659 ***** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 28 December 2024 11:33:32 -0500 (0:00:00.050) 0:00:22.710 ***** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:29:52 EST", "ActiveEnterTimestampMonotonic": "332814185", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "system.slice dbus-broker.service polkit.service basic.target sysinit.target dbus.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-28 11:29:51 EST", "AssertTimestampMonotonic": "332166798", "Before": "multi-user.target shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "544198000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:29:51 EST", "ConditionTimestampMonotonic": "332166794", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"shutdown.target ip6tables.service ebtables.service ipset.service iptables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4592", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainHandoffTimestampMonotonic": "332196835", "ExecMainPID": "11035", "ExecMainStartTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainStartTimestampMonotonic": "332169481", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:29:51 EST", "InactiveExitTimestampMonotonic": "332170338", "InvocationID": "5e03e6ef9da5486cbe44b65fb67d7018", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": 
"524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11035", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3118821376", "MemoryCurrent": "33103872", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34783232", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", 
"StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:33:19 EST", "StateChangeTimestampMonotonic": "539791661", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 28 December 2024 11:33:33 -0500 (0:00:00.547) 0:00:23.257 ***** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:29:52 EST", "ActiveEnterTimestampMonotonic": "332814185", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "system.slice dbus-broker.service polkit.service basic.target sysinit.target dbus.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-28 11:29:51 EST", "AssertTimestampMonotonic": "332166798", "Before": "multi-user.target shutdown.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "544198000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:29:51 EST", "ConditionTimestampMonotonic": "332166794", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target ip6tables.service ebtables.service 
ipset.service iptables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4592", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainHandoffTimestampMonotonic": "332196835", "ExecMainPID": "11035", "ExecMainStartTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainStartTimestampMonotonic": "332169481", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:29:51 EST", "InactiveExitTimestampMonotonic": "332170338", "InvocationID": "5e03e6ef9da5486cbe44b65fb67d7018", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": 
"13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11035", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3123294208", "MemoryCurrent": "33103872", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34783232", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": 
"infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:33:19 EST", "StateChangeTimestampMonotonic": "539791661", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 28 December 2024 11:33:33 -0500 (0:00:00.532) 0:00:23.790 ***** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 28 December 2024 11:33:33 -0500 (0:00:00.041) 0:00:23.832 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 28 December 2024 11:33:33 -0500 (0:00:00.028) 0:00:23.860 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 28 December 2024 11:33:33 -0500 (0:00:00.027) 0:00:23.887 ***** changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 28 December 2024 11:33:34 -0500 (0:00:01.090) 0:00:24.978 ***** skipping: [managed-node2] => (item={'port': 
'8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => { "changed": false }
MSG: All items skipped

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 28 December 2024 11:33:34 -0500 (0:00:00.063) 0:00:25.042 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 28 December 2024 11:33:34 -0500 (0:00:00.034) 0:00:25.076 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 28 December 2024 11:33:34 -0500 (0:00:00.029) 0:00:25.105 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 28 December 2024 11:33:34 -0500 (0:00:00.035) 0:00:25.141 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 28 December 2024 11:33:34 -0500 (0:00:00.048) 0:00:25.189 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 28 December 2024 11:33:35 -0500 (0:00:00.114) 0:00:25.304 *****
skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" }

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 28 December 2024 11:33:35 -0500 (0:00:00.095) 0:00:25.399 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" }
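The two "changed" items a few tasks up are the point of the firewall hand-off: the podman role forwarded the demo's ports to fedora.linux_system_roles.firewall, which opened 8000/tcp and 9000/tcp permanently. The censored tasks below then create three podman secrets. A rough sketch of the play variables that would drive both steps, using the podman role's documented pass-through variables; the secret name and value here are invented placeholders, since the real entries are hidden by no_log:

    # Sketch only: inputs consumed by fedora.linux_system_roles.podman.
    podman_firewall:                 # forwarded to fedora.linux_system_roles.firewall
      - port: 8000/tcp
        state: enabled
      - port: 9000/tcp
        state: enabled
    podman_secrets:                  # placeholder entry; real names/data are censored above
      - name: example-app-secret                 # hypothetical name
        data: "{{ vaulted_secret_value }}"       # hypothetical vaulted variable
        state: present

Each podman_secrets entry is processed once per item, which is why handle_secret.yml is included three times below.

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***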
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.064) 0:00:25.464 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.040) 0:00:25.505 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.034) 0:00:25.540 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.034) 0:00:25.574 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.143) 0:00:25.718 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.056) 0:00:25.775 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.045) 0:00:25.820 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.056) 0:00:25.877 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to 
cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.040) 0:00:25.917 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.044) 0:00:25.961 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.049) 0:00:26.011 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 28 December 2024 11:33:35 -0500 (0:00:00.047) 0:00:26.058 ***** [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 28 December 2024 11:33:36 -0500 (0:00:00.694) 0:00:26.753 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 28 December 2024 11:33:36 -0500 (0:00:00.056) 0:00:26.809 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 28 December 2024 11:33:36 -0500 (0:00:00.100) 0:00:26.910 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:33:36 -0500 (0:00:00.166) 0:00:27.076 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:33:36 -0500 (0:00:00.093) 0:00:27.170 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:33:36 -0500 (0:00:00.061) 0:00:27.231 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.051) 0:00:27.283 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.052) 0:00:27.336 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.567) 0:00:27.903 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.055) 0:00:27.959 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.061) 0:00:28.020 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.100) 0:00:28.120 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.038) 0:00:28.159 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.040) 0:00:28.200 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 28 December 2024 11:33:37 -0500 (0:00:00.037) 0:00:28.238 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 28 December 2024 11:33:38 -0500 (0:00:00.047) 0:00:28.285 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 28 December 2024 11:33:38 -0500 (0:00:00.504) 0:00:28.789 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 28 December 2024 11:33:38 -0500 (0:00:00.045) 0:00:28.835 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:33:38 -0500 (0:00:00.170) 0:00:29.005 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", 
"__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:33:38 -0500 (0:00:00.041) 0:00:29.046 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:33:38 -0500 (0:00:00.041) 0:00:29.088 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:33:38 -0500 (0:00:00.105) 0:00:29.194 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.074) 0:00:29.269 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.156) 0:00:29.425 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.053) 0:00:29.479 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.060) 0:00:29.539 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.066) 0:00:29.606 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 
4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.442) 0:00:30.048 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.049) 0:00:30.097 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.032) 0:00:30.129 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.030) 0:00:30.160 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.029) 0:00:30.190 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:33:39 -0500 (0:00:00.033) 0:00:30.224 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.030) 0:00:30.255 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.028) 0:00:30.283 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.028) 0:00:30.311 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.052) 0:00:30.364 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.030) 0:00:30.395 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.039) 0:00:30.435 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.119) 0:00:30.555 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.038) 0:00:30.593 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK 
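
[NOTE]: Every subuid/subgid task in the handle_user_group.yml block above was skipped because __podman_user is "root": the role only needs subordinate ID ranges when it must run a quadlet rootless. For a non-root user, those tasks would query getsubids (or, per the "Get subuid file"/"Get subgid file" fallbacks, parse the files directly when getsubids is absent) and fail unless the user had entries of the usual form. The user name and ranges below are hypothetical, for illustration only:

    # /etc/subuid and /etc/subgid (illustrative entries, not from this run)
    alice:100000:65536
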
[fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.032) 0:00:30.626 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.110) 0:00:30.736 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.052) 0:00:30.789 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.028) 0:00:30.817 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.027) 0:00:30.845 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.026) 0:00:30.872 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.025) 0:00:30.897 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 28 December 2024 11:33:40 -0500 (0:00:00.025) 0:00:30.923 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 28 December 2024 11:33:41 -0500 (0:00:00.381) 0:00:31.305 ***** changed: [managed-node2] => { "changed": true, "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "dest": "/etc/containers/systemd/quadlet-demo.network", "gid": 0, "group": "root", "md5sum": "061f3cf318cbd8ab5794bb1173831fb8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 74, "src": "/root/.ansible/tmp/ansible-tmp-1735403621.1117623-19088-239700523427720/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 28 December 2024 11:33:41 -0500 (0:00:00.856) 0:00:32.161 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 28 December 2024 11:33:41 -0500 (0:00:00.050) 0:00:32.212 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 28 December 2024 11:33:42 -0500 (0:00:00.046) 0:00:32.258 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 28 December 2024 11:33:42 -0500 (0:00:00.803) 0:00:33.061 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount network-online.target systemd-journald.socket sysinit.target basic.target system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod 
cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": 
"13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3143344128", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", 
"StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.611) 0:00:33.673 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.033) 0:00:33.706 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.044) 0:00:33.750 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.041) 0:00:33.792 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.031) 0:00:33.824 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.056) 0:00:33.880 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.197) 0:00:34.077 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.067) 0:00:34.145 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:33:43 -0500 (0:00:00.060) 0:00:34.206 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.067) 0:00:34.274 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.481) 0:00:34.755 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.054) 0:00:34.810 ***** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.090) 0:00:34.900 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.094) 0:00:34.994 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.098) 0:00:35.093 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.065) 0:00:35.159 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:33:44 -0500 (0:00:00.071) 0:00:35.231 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.068) 0:00:35.300 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.073) 0:00:35.374 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 
December 2024 11:33:45 -0500 (0:00:00.132) 0:00:35.506 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.105) 0:00:35.612 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.099) 0:00:35.712 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.193) 0:00:35.906 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.044) 0:00:35.950 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.031) 0:00:35.982 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.070) 0:00:36.053 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.087) 0:00:36.140 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.044) 0:00:36.184 ***** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:33:45 -0500 (0:00:00.047) 0:00:36.231 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 28 December 2024 11:33:46 -0500 (0:00:00.050) 0:00:36.282 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 28 December 2024 11:33:46 -0500 (0:00:00.069) 0:00:36.351 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 28 December 2024 11:33:46 -0500 (0:00:00.065) 0:00:36.417 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 34, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 28 December 2024 11:33:46 -0500 (0:00:00.415) 0:00:36.832 ***** changed: [managed-node2] => { "changed": true, "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "dest": "/etc/containers/systemd/quadlet-demo-mysql.volume", "gid": 0, "group": "root", "md5sum": "5ddd03a022aeb4502d9bc8ce436b4233", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 9, "src": "/root/.ansible/tmp/ansible-tmp-1735403626.645213-19345-73721816979131/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 28 December 2024 11:33:47 -0500 (0:00:00.732) 0:00:37.565 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 28 December 2024 11:33:47 -0500 (0:00:00.045) 0:00:37.610 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 28 December 2024 11:33:47 -0500 (0:00:00.047) 0:00:37.658 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 28 December 2024 11:33:48 -0500 (0:00:00.768) 0:00:38.426 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket network-online.target sysinit.target basic.target -.mount system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3114045440", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", 
"PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 28 December 2024 11:33:48 -0500 (0:00:00.610) 0:00:39.037 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:33:48 -0500 (0:00:00.034) 0:00:39.072 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:33:48 -0500 (0:00:00.122) 0:00:39.195 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:33:48 -0500 (0:00:00.044) 0:00:39.239 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.038) 0:00:39.277 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.050) 0:00:39.327 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.059) 0:00:39.386 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.033) 0:00:39.420 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
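
[NOTE]: For readability, here is the rendered quadlet-demo-mysql.container unit from the __podman_quadlet_str fact above, with its escaped newlines expanded. The comments map each [Container] key to the podman run argument it produces in the generated quadlet-demo-mysql.service ExecStart later in this log:

    [Install]
    WantedBy=default.target

    [Container]
    # -> trailing image argument
    Image=quay.io/linux-system-roles/mysql:5.6
    # -> --name quadlet-demo-mysql
    ContainerName=quadlet-demo-mysql
    # -> -v systemd-quadlet-demo-mysql:/var/lib/mysql (the .volume reference resolves to the default volume name)
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    # -> -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z (:Z relabels the host directory for SELinux)
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    # -> --network systemd-quadlet-demo (the .network reference resolves to the default network name)
    Network=quadlet-demo.network
    # -> --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    # -> --health-cmd /bin/true
    HealthCmd=/bin/true
    # -> --health-on-failure kill
    HealthOnFailure=kill

The host-path Volume= line is also why the role collects /tmp/quadlet_demo into __podman_volumes below and creates it in the "Create host directories" task before starting the service.
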
Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.034) 0:00:39.454 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.086) 0:00:39.540 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.396) 0:00:39.936 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.043) 0:00:39.980 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.031) 0:00:40.011 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.027) 0:00:40.039 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.027) 0:00:40.066 ***** skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.027) 0:00:40.093 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.027) 0:00:40.120 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.028) 0:00:40.149 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:33:49 -0500 (0:00:00.030) 0:00:40.179 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.077) 0:00:40.257 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.054) 0:00:40.311 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.054) 0:00:40.366 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.126) 0:00:40.492 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.062) 0:00:40.555 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.048) 0:00:40.604 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.089) 0:00:40.694 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.066) 0:00:40.760 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.035) 0:00:40.795 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.083) 0:00:40.879 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 28 December 2024 11:33:50 -0500 (0:00:00.030) 0:00:40.909 ***** changed: [managed-node2] => (item=/tmp/quadlet_demo) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/quadlet_demo", "mode": "0777", "owner": "root", "path": "/tmp/quadlet_demo", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task 
path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 28 December 2024 11:33:51 -0500 (0:00:00.403) 0:00:41.313 ***** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 28 December 2024 11:33:58 -0500 (0:00:07.193) 0:00:48.507 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 28 December 2024 11:33:58 -0500 (0:00:00.401) 0:00:48.908 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 28 December 2024 11:33:58 -0500 (0:00:00.032) 0:00:48.940 ***** changed: [managed-node2] => { "changed": true, "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "dest": "/etc/containers/systemd/quadlet-demo-mysql.container", "gid": 0, "group": "root", "md5sum": "341b473056d2a5dfa35970b0d2e23a5d", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 363, "src": "/root/.ansible/tmp/ansible-tmp-1735403638.7432506-19770-169391064880553/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 28 December 2024 11:33:59 -0500 (0:00:00.727) 0:00:49.667 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 28 December 2024 11:33:59 -0500 (0:00:00.051) 0:00:49.719 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 28 December 2024 11:34:00 -0500 (0:00:00.767) 0:00:50.486 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", 
"ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "quadlet-demo-network.service system.slice tmp.mount basic.target network-online.target quadlet-demo-mysql-volume.service sysinit.target systemd-journald.socket -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i 
--cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2953359360", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": 
"infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice -.mount quadlet-demo-network.service sysinit.target quadlet-demo-mysql-volume.service", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 
30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.915) 0:00:51.402 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.052) 0:00:51.454 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.051) 0:00:51.505 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.050) 0:00:51.556 
***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.037) 0:00:51.593 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.055) 0:00:51.649 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.058) 0:00:51.707 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.036) 0:00:51.744 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.033) 0:00:51.777 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:34:01 -0500 (0:00:00.041) 0:00:51.819 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] 
*** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.431) 0:00:52.251 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.035) 0:00:52.287 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.068) 0:00:52.355 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.043) 0:00:52.399 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.034) 0:00:52.434 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.028) 0:00:52.462 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.028) 0:00:52.491 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.028) 0:00:52.519 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.028) 0:00:52.547 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.050) 0:00:52.598 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.041) 0:00:52.640 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.048) 0:00:52.689 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.087) 0:00:52.777 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.041) 0:00:52.818 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.031) 0:00:52.849 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.070) 0:00:52.920 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] 
************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.051) 0:00:52.971 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.029) 0:00:53.001 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.028) 0:00:53.029 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.027) 0:00:53.057 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.024) 0:00:53.081 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 28 December 2024 11:34:02 -0500 (0:00:00.064) 0:00:53.145 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 103, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 28 December 2024 11:34:03 -0500 (0:00:00.445) 0:00:53.590 ***** changed: [managed-node2] => { "changed": true, "checksum": "d681c7d56f912150d041873e880818b22a90c188", "dest": "/etc/containers/systemd/envoy-proxy-configmap.yml", "gid": 0, "group": "root", "md5sum": "aec75d972c231aac004e1338934544cf", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2102, "src": "/root/.ansible/tmp/ansible-tmp-1735403643.3933573-19976-127622001330519/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 28 December 2024 11:34:04 -0500 (0:00:00.708) 0:00:54.299 ***** 
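Note: envoy-proxy-configmap.yml was just copied into /etc/containers/systemd, but no service will be started for it: the role computed an empty __podman_service_name for this item, and its Start/Restart service tasks are skipped below. Quadlet only generates units for suffixes such as .container, .kube, .volume, and .network; a plain .yml file in that directory is payload, consumed here through ConfigMap=envoy-proxy-configmap.yml in quadlet-demo.kube later in this run. A minimal sketch of how the test presumably lists it, assuming the role's podman_quadlet_specs/file_src convention (recalled from the role documentation, not shown in this excerpt):

    podman_quadlet_specs:
      - file_src: envoy-proxy-configmap.yml   # payload only; no unit is generated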
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 28 December 2024 11:34:04 -0500 (0:00:00.035) 0:00:54.334 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 28 December 2024 11:34:04 -0500 (0:00:00.033) 0:00:54.368 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 28 December 2024 11:34:04 -0500 (0:00:00.825) 0:00:55.194 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 28 December 2024 11:34:04 -0500 (0:00:00.043) 0:00:55.237 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.042) 0:00:55.279 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing 
hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.147) 0:00:55.427 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.045) 0:00:55.472 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.067) 0:00:55.540 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.053) 0:00:55.594 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.059) 0:00:55.653 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.036) 0:00:55.689 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.034) 0:00:55.724 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.043) 
0:00:55.768 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.381) 0:00:56.149 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.033) 0:00:56.183 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:34:05 -0500 (0:00:00.031) 0:00:56.214 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.031) 0:00:56.245 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.029) 0:00:56.275 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.080) 0:00:56.355 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.031) 0:00:56.387 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.031) 0:00:56.418 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.031) 0:00:56.450 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.055) 0:00:56.506 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.035) 0:00:56.542 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.050) 0:00:56.592 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.075) 0:00:56.668 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.040) 0:00:56.708 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.030) 0:00:56.738 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.067) 0:00:56.806 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.054) 0:00:56.860 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.031) 0:00:56.891 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.030) 0:00:56.922 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.031) 0:00:56.954 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.026) 0:00:56.980 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 28 December 2024 11:34:06 -0500 (0:00:00.030) 0:00:57.011 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 136, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 28 December 2024 11:34:07 -0500 (0:00:00.426) 0:00:57.438 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 28 December 2024 11:34:07 -0500 (0:00:00.030) 0:00:57.468 ***** changed: [managed-node2] => { "changed": true, "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "dest": "/etc/containers/systemd/quadlet-demo.yml", "gid": 0, "group": "root", "md5sum": "fd890594adfc24339cb9cdc5e7b19a66", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1605, "src": "/root/.ansible/tmp/ansible-tmp-1735403647.2769017-20133-246311741527282/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 28 December 2024 11:34:08 -0500 (0:00:01.025) 0:00:58.494 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 28 December 2024 11:34:08 -0500 (0:00:00.070) 0:00:58.564 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.923) 0:00:59.488 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.036) 0:00:59.524 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.035) 0:00:59.560 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin 
port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.050) 0:00:59.610 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.058) 0:00:59.669 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.073) 0:00:59.742 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.053) 0:00:59.796 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.068) 0:00:59.865 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.036) 0:00:59.901 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.037) 0:00:59.939 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:34:09 -0500 (0:00:00.039) 0:00:59.978 ***** ok: 
[managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.372) 0:01:00.350 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.028) 0:01:00.379 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.028) 0:01:00.407 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.027) 0:01:00.435 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.027) 0:01:00.463 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.027) 0:01:00.490 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in 
subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.028) 0:01:00.518 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.031) 0:01:00.550 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.045) 0:01:00.596 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.197) 0:01:00.793 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:34:10 -0500 (0:00:00.039) 0:01:00.832 ***** ok: [managed-node2] => { "changed": false, "content": 
"LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK", "encoding": "base64", "source": "/etc/containers/systemd/quadlet-demo.yml" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.492) 0:01:01.325 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/wordpress:4.8-apache", "quay.io/linux-system-roles/envoyproxy:v1.25.0" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [ "/tmp/httpd3", "/tmp/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.084) 0:01:01.410 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.039) 0:01:01.449 ***** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.029) 0:01:01.478 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.063) 0:01:01.542 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.050) 0:01:01.592 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.028) 0:01:01.621 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.028) 0:01:01.649 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 28 December 2024 11:34:11 -0500 (0:00:00.026) 0:01:01.676 ***** changed: [managed-node2] => (item=/tmp/httpd3) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 28 December 2024 11:34:12 -0500 (0:00:00.730) 0:01:02.406 ***** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 28 December 2024 11:34:29 -0500 (0:00:16.988) 0:01:19.395 ***** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 160, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 28 December 2024 11:34:29 -0500 (0:00:00.383) 0:01:19.778 ***** changed: [managed-node2] => { "changed": true, "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "dest": "/etc/containers/systemd/quadlet-demo.kube", "gid": 0, "group": "root", "md5sum": "da53c88f92b68b0487aa209f795b6bb3", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 456, "src": "/root/.ansible/tmp/ansible-tmp-1735403669.5789778-20856-168524673727792/.source.kube", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 28 December 2024 11:34:30 -0500 (0:00:00.698) 0:01:20.476 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 28 December 2024 11:34:30 -0500 (0:00:00.031) 0:01:20.508 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 28 December 2024 11:34:30 -0500 (0:00:00.031) 0:01:20.539 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 28 December 2024 11:34:31 -0500 (0:00:00.795) 0:01:21.334 ***** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount quadlet-demo-mysql.service basic.target network-online.target systemd-journald.socket sysinit.target 
system.slice quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", 
"FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2470621184", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", 
"ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target quadlet-demo-network.service -.mount quadlet-demo-mysql.service system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 28 December 2024 11:34:32 -0500 (0:00:01.319) 0:01:22.654 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 28 December 2024 11:34:32 -0500 (0:00:00.117) 0:01:22.771 ***** skipping: [managed-node2] => { "changed": false, 
"skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 28 December 2024 11:34:32 -0500 (0:00:00.049) 0:01:22.820 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 28 December 2024 11:34:32 -0500 (0:00:00.048) 0:01:22.869 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check quadlet files] ***************************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96 Saturday 28 December 2024 11:34:32 -0500 (0:00:00.076) 0:01:22.945 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/containers/systemd" ], "delta": "0:00:00.004629", "end": "2024-12-28 11:34:33.096862", "rc": 0, "start": "2024-12-28 11:34:33.092233" } STDOUT: total 24 drwxr-xr-x. 9 root root 178 Dec 28 11:30 ../ -rw-r--r--. 1 root root 74 Dec 28 11:33 quadlet-demo.network -rw-r--r--. 1 root root 9 Dec 28 11:33 quadlet-demo-mysql.volume -rw-r--r--. 1 root root 363 Dec 28 11:33 quadlet-demo-mysql.container -rw-r--r--. 1 root root 2102 Dec 28 11:34 envoy-proxy-configmap.yml -rw-r--r--. 1 root root 1605 Dec 28 11:34 quadlet-demo.yml -rw-r--r--. 1 root root 456 Dec 28 11:34 quadlet-demo.kube drwxr-xr-x. 
2 root root 185 Dec 28 11:34 ./ TASK [Check containers] ******************************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100 Saturday 28 December 2024 11:34:33 -0500 (0:00:00.474) 0:01:23.420 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.062754", "end": "2024-12-28 11:34:33.587274", "failed_when_result": false, "rc": 0, "start": "2024-12-28 11:34:33.524520" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 56d61cd416db localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 001056497135-service d288a7e88646 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 0.0.0.0:15002->80/tcp c97b04ddc09a-infra 3b2e8426a107 quay.io/libpod/testimage:20210610 About a minute ago Up About a minute 0.0.0.0:15002->80/tcp httpd2-httpd2 0a5a42555773 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 61ef7cf78877-service 4c761524f51e localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp 00b5192a1328-infra aa8d45554bfc quay.io/libpod/testimage:20210610 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp httpd3-httpd3 38908400944e quay.io/linux-system-roles/mysql:5.6 mysqld 32 seconds ago Up 33 seconds (healthy) 3306/tcp quadlet-demo-mysql 421487fe4389 localhost/podman-pause:5.3.1-1733097600 1 second ago Up 2 seconds a96f3a51b8d1-service 6e505f6ec50b localhost/podman-pause:5.3.1-1733097600 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp 55132399920f-infra dcf01e22c906 quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress e1dba157116c quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 
1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy TASK [Check volumes] *********************************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105 Saturday 28 December 2024 11:34:33 -0500 (0:00:00.486) 0:01:23.906 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls" ], "delta": "0:00:00.043080", "end": "2024-12-28 11:34:34.087039", "failed_when_result": false, "rc": 0, "start": "2024-12-28 11:34:34.043959" } STDOUT: DRIVER VOLUME NAME local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [Check pods] ************************************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110 Saturday 28 December 2024 11:34:34 -0500 (0:00:00.529) 0:01:24.436 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.039155", "end": "2024-12-28 11:34:34.571609", "failed_when_result": false, "rc": 0, "start": "2024-12-28 11:34:34.532454" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS 55132399920f quadlet-demo Running 2 seconds ago 6e505f6ec50b 6e505f6ec50b,dcf01e22c906,e1dba157116c 55132399920f-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running 00b5192a1328 httpd3 Running About a minute ago 4c761524f51e 4c761524f51e,aa8d45554bfc 00b5192a1328-infra,httpd3-httpd3 running,running c97b04ddc09a httpd2 Running About a minute ago d288a7e88646 d288a7e88646,3b2e8426a107 c97b04ddc09a-infra,httpd2-httpd2 running,running TASK [Check systemd] *********************************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115 Saturday 28 December 2024 11:34:34 -0500 (0:00:00.466) 0:01:24.902 ***** ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units | grep quadlet", "delta": "0:00:00.013867", "end": "2024-12-28 11:34:34.997387", "failed_when_result": false, "rc": 0, "start": "2024-12-28 11:34:34.983520" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service quadlet-demo.service loaded active running quadlet-demo.service TASK [Check web] *************************************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121 Saturday 28 December 2024 11:34:35 -0500 (0:00:00.424) 0:01:25.327 ***** FAILED - RETRYING: [managed-node2]: Check web (6 retries left). FAILED - RETRYING: [managed-node2]: Check web (5 retries left). FAILED - RETRYING: [managed-node2]: Check web (4 retries left). FAILED - RETRYING: [managed-node2]: Check web (3 retries left). FAILED - RETRYING: [managed-node2]: Check web (2 retries left). FAILED - RETRYING: [managed-node2]: Check web (1 retries left). fatal: [managed-node2]: FAILED! 
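
NOTE: This "Check web" task is an Ansible URL fetch of https://localhost:8000 with dest=/run/out and six retries. Per the podman ps output above, host port 8000 forwards to the envoy container's port 8080, and the pod mounts the envoy-certificates secret, so the endpoint is evidently served over TLS by envoy. The MSG line below carries no underlying error, so the root cause is not recoverable from this log alone; plausible next steps on the host (standard commands, not taken from this run) would be:

    curl -kv https://localhost:8000/
    podman logs quadlet-demo-envoy
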
=> { "attempts": 6, "changed": false, "dest": "/run/out", "elapsed": 0, "url": "https://localhost:8000" } MSG: Request failed: TASK [Dump journal] ************************************************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Saturday 28 December 2024 11:35:08 -0500 (0:00:33.467) 0:01:58.795 ***** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.032278", "end": "2024-12-28 11:35:08.885965", "failed_when_result": true, "rc": 0, "start": "2024-12-28 11:35:08.853687" } STDOUT: Dec 28 11:29:13 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Dec 28 11:29:13 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Dec 28 11:29:13 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Dec 28 11:29:13 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Dec 28 11:29:21 managed-node2 kernel: SELinux: Converting 391 SID table entries... Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability open_perms=1 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability always_check_network=0 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Dec 28 11:29:21 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Dec 28 11:29:22 managed-node2 groupadd[8882]: group added to /etc/group: name=polkitd, GID=114 Dec 28 11:29:22 managed-node2 groupadd[8882]: group added to /etc/gshadow: name=polkitd Dec 28 11:29:22 managed-node2 groupadd[8882]: new group: name=polkitd, GID=114 Dec 28 11:29:22 managed-node2 useradd[8885]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Dec 28 11:29:22 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 28 11:29:22 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 28 11:29:22 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 28 11:29:22 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1255. Dec 28 11:29:41 managed-node2 systemd[1]: Started run-r2e9115bf38a945f2b9622dfa60da2341.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r2e9115bf38a945f2b9622dfa60da2341.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r2e9115bf38a945f2b9622dfa60da2341.service has finished successfully. ░░ ░░ The job identifier is 1333. Dec 28 11:29:41 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1411. Dec 28 11:29:41 managed-node2 systemctl[9577]: Warning: The unit file, source configuration file or drop-ins of man-db-cache-update.service changed on disk. Run 'systemctl daemon-reload' to reload units. Dec 28 11:29:41 managed-node2 systemd[1]: Reload requested from client PID 9580 ('systemctl') (unit session-5.scope)... Dec 28 11:29:41 managed-node2 systemd[1]: Reloading... Dec 28 11:29:42 managed-node2 systemd[1]: Reloading finished in 219 ms. 
Dec 28 11:29:42 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Dec 28 11:29:43 managed-node2 sudo[8749]: pam_unix(sudo:session): session closed for user root Dec 28 11:29:44 managed-node2 python3.12[10044]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:29:45 managed-node2 python3.12[10182]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 28 11:29:45 managed-node2 python3.12[10314]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:29:47 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Dec 28 11:29:47 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1411. Dec 28 11:29:47 managed-node2 systemd[1]: run-r2e9115bf38a945f2b9622dfa60da2341.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r2e9115bf38a945f2b9622dfa60da2341.service has successfully entered the 'dead' state. Dec 28 11:29:48 managed-node2 python3.12[10451]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:29:48 managed-node2 python3.12[10582]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:29:49 managed-node2 python3.12[10713]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:29:51 managed-node2 python3.12[10845]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 28 11:29:51 managed-node2 python3.12[10978]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 28 11:29:51 managed-node2 systemd[1]: Reload requested from client PID 10981 ('systemctl') (unit session-5.scope)... Dec 28 11:29:51 managed-node2 systemd[1]: Reloading... Dec 28 11:29:51 managed-node2 systemd[1]: Reloading finished in 189 ms. Dec 28 11:29:51 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... 
░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1489. Dec 28 11:29:52 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1489. Dec 28 11:29:52 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Dec 28 11:29:53 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1573. Dec 28 11:29:53 managed-node2 python3.12[11197]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 28 11:29:53 managed-node2 polkitd[11210]: Started polkitd version 125 Dec 28 11:29:53 managed-node2 rsyslogd[655]: imjournal: journal files changed, reloading... [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Dec 28 11:29:53 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 1573. 
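
NOTE: The firewall_lib invocation above opens 15001-15003/tcp in both the permanent and runtime firewalld configuration; these are the host ports the httpd2/httpd3 test pods publish. Outside the role, the approximate firewall-cmd equivalent (standard firewalld usage, shown for orientation, not taken from this log) is:

    firewall-cmd --permanent --add-port=15001-15003/tcp
    firewall-cmd --add-port=15001-15003/tcp
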
Dec 28 11:29:54 managed-node2 python3.12[11351]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:29:55 managed-node2 python3.12[11482]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:29:55 managed-node2 python3.12[11613]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:29:56 managed-node2 python3.12[11745]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:29:57 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Dec 28 11:29:57 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. 
Dec 28 11:29:57 managed-node2 systemd[1]: Started run-r6f2c4c63e0af43aba31dd057652254e2.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r6f2c4c63e0af43aba31dd057652254e2.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r6f2c4c63e0af43aba31dd057652254e2.service has finished successfully. ░░ ░░ The job identifier is 1654. Dec 28 11:29:57 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1732. Dec 28 11:29:58 managed-node2 python3.12[11883]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Dec 28 11:29:58 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Dec 28 11:29:58 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1732. Dec 28 11:29:58 managed-node2 systemd[1]: run-r6f2c4c63e0af43aba31dd057652254e2.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r6f2c4c63e0af43aba31dd057652254e2.service has successfully entered the 'dead' state. Dec 28 11:30:00 managed-node2 python3.12[12047]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Dec 28 11:30:01 managed-node2 kernel: SELinux: Converting 473 SID table entries... 
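
NOTE: The local_seport call above labels the same 15001-15003/tcp range with the http_port_t SELinux type so that confined web servers may bind those ports; the kernel "SELinux: Converting ... SID table entries" and "policy capability" messages around this point are the policy reload that this triggers. A rough manual equivalent (standard policycoreutils usage, not from this log) would be:

    semanage port -a -t http_port_t -p tcp 15001-15003
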
Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability open_perms=1 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability always_check_network=0 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Dec 28 11:30:01 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Dec 28 11:30:01 managed-node2 python3.12[12182]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Dec 28 11:30:05 managed-node2 python3.12[12313]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:30:07 managed-node2 python3.12[12446]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:30:08 managed-node2 python3.12[12577]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:30:08 managed-node2 python3.12[12708]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:30:09 managed-node2 python3.12[12813]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403408.4179106-8990-12269750223039/.source.yml _original_basename=.jj_bgtl2 follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:30:09 managed-node2 python3.12[12944]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 28 11:30:09 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat1538861670-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat1538861670-merged.mount has successfully entered the 'dead' state. 
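
NOTE: containers.podman.podman_play with state=created, as invoked above for nopull.yml, creates the pod from the kube file but does not start it, which is why the journal entries that follow show pod and container "create" events with no matching "start" events. On the CLI this maps roughly (an assumed equivalence, for illustration only) to:

    podman kube play --start=false /etc/containers/ansible-kubernetes.d/nopull.yml
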
Dec 28 11:30:09 managed-node2 kernel: evm: overlay not supported Dec 28 11:30:09 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck3921600250-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck3921600250-merged.mount has successfully entered the 'dead' state. Dec 28 11:30:09 managed-node2 podman[12951]: 2024-12-28 11:30:09.864510992 -0500 EST m=+0.164481913 system refresh Dec 28 11:30:10 managed-node2 podman[12951]: 2024-12-28 11:30:10.136093286 -0500 EST m=+0.436064263 image build 6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327 Dec 28 11:30:10 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1812. Dec 28 11:30:10 managed-node2 systemd[1]: Created slice machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice - cgroup machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice. ░░ Subject: A start job for unit machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice has finished successfully. ░░ ░░ The job identifier is 1811. Dec 28 11:30:10 managed-node2 podman[12951]: 2024-12-28 11:30:10.183522559 -0500 EST m=+0.483493452 container create ac11033f777aa9d217c13c8f64bc54df311154923605ee127895f4bb01f345a5 (image=localhost/podman-pause:5.3.1-1733097600, name=d9899c53501b-infra, pod_id=d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a, io.buildah.version=1.38.0) Dec 28 11:30:10 managed-node2 podman[12951]: 2024-12-28 11:30:10.187936408 -0500 EST m=+0.487907302 pod create d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a (image=, name=nopull) Dec 28 11:30:12 managed-node2 podman[12951]: 2024-12-28 11:30:12.416765682 -0500 EST m=+2.716736577 container create aad704eb4fecc412d41c3fc370bbeac6b4eadb09ac1cd38a0e954309102e8511 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:30:12 managed-node2 podman[12951]: 2024-12-28 11:30:12.395588751 -0500 EST m=+2.695559776 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:30:12 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
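
NOTE: The "image build 6c58b175..." entry and the localhost/podman-pause:5.3.1-1733097600 references above are podman building its local pause image; each kube pod then gets an infra container from that image, which is why pause containers appear alongside the workload containers in the podman ps -a output earlier in this log. To list the image on the host (standard usage, not from this run):

    podman images localhost/podman-pause
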
Dec 28 11:30:14 managed-node2 python3.12[13281]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:30:16 managed-node2 python3.12[13418]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:30:19 managed-node2 python3.12[13551]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:30:21 managed-node2 python3.12[13683]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Dec 28 11:30:21 managed-node2 python3.12[13816]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Dec 28 11:30:22 managed-node2 python3.12[13949]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Dec 28 11:30:24 managed-node2 python3.12[14080]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:30:25 managed-node2 python3.12[14212]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:30:26 managed-node2 python3.12[14344]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Dec 28 11:30:28 managed-node2 python3.12[14504]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Dec 28 11:30:29 managed-node2 python3.12[14635]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Dec 28 11:30:34 managed-node2 python3.12[14766]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:30:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Dec 28 11:30:37 managed-node2 podman[14908]: 2024-12-28 11:30:37.014890346 -0500 EST m=+0.258360081 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized
Dec 28 11:30:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Dec 28 11:30:37 managed-node2 python3.12[15046]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:30:38 managed-node2 python3.12[15177]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:30:38 managed-node2 python3.12[15308]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 28 11:30:38 managed-node2 python3.12[15413]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403438.2113347-10339-78909536516433/.source.yml _original_basename=.9ios6n1k follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:30:39 managed-node2 python3.12[15544]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Dec 28 11:30:39 managed-node2 systemd[1]: Created slice machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice - cgroup machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice.
░░ Subject: A start job for unit machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice has finished successfully.
░░
░░ The job identifier is 1817.
Dec 28 11:30:39 managed-node2 podman[15551]: 2024-12-28 11:30:39.494191293 -0500 EST m=+0.059570181 container create f0e9e0de6186a585e9881b1d2dfc0ec794232fd2dc60a6632875b53a67f604ff (image=localhost/podman-pause:5.3.1-1733097600, name=58826ba81214-infra, pod_id=58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47, io.buildah.version=1.38.0)
Dec 28 11:30:39 managed-node2 podman[15551]: 2024-12-28 11:30:39.499249841 -0500 EST m=+0.064628596 pod create 58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47 (image=, name=bogus)
Dec 28 11:30:39 managed-node2 podman[15551]: 2024-12-28 11:30:39.759173006 -0500 EST m=+0.324551783 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized
Dec 28 11:30:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
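The podman_play invocation above maps to a task of roughly this shape; the parameter values are taken directly from the Invoked line, but this is a sketch rather than the test's literal source. The expected pull failure for this_is_a_bogus_image is the point of this test iteration:

    - name: Create the bogus pod without starting it
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: created          # create the pod and containers but do not start them
        executable: podman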
Dec 28 11:30:42 managed-node2 python3.12[15820]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:30:43 managed-node2 python3.12[15957]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:30:46 managed-node2 python3.12[16091]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:30:47 managed-node2 python3.12[16223]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Dec 28 11:30:48 managed-node2 python3.12[16356]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Dec 28 11:30:49 managed-node2 python3.12[16489]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Dec 28 11:30:51 managed-node2 python3.12[16620]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:30:51 managed-node2 python3.12[16752]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:30:52 managed-node2 python3.12[16884]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Dec 28 11:30:54 managed-node2 python3.12[17044]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Dec 28 11:30:55 managed-node2 python3.12[17175]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Dec 28 11:31:00 managed-node2 python3.12[17306]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:31:02 managed-node2 python3.12[17439]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:31:03 managed-node2 python3.12[17571]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Dec 28 11:31:03 managed-node2 python3.12[17704]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:31:04 managed-node2 python3.12[17837]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Dec 28 11:31:04 managed-node2 python3.12[17837]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml
Dec 28 11:31:04 managed-node2 podman[17845]: 2024-12-28 11:31:04.781922318 -0500 EST m=+0.026363011 pod stop d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a (image=, name=nopull)
Dec 28 11:31:04 managed-node2 systemd[1]: Removed slice machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice - cgroup machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice.
░░ Subject: A stop job for unit machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit machine-libpod_pod_d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a.slice has finished.
░░
░░ The job identifier is 1823 and the job result is done.
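The recurring firewall_lib and local_seport invocations in these iterations are what the fedora.linux_system_roles firewall and selinux roles generate for input of roughly this shape (variable names follow those roles' documented interfaces; this is a sketch, not the test's actual vars file):

    firewall:
      - port: 15001-15003/tcp      # opened both permanently and at runtime, per the Invoked line
        state: enabled
    selinux_ports:
      - ports: 15001-15003         # labeled http_port_t so a confined web server may bind them
        proto: tcp
        setype: http_port_t
        state: present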
Dec 28 11:31:04 managed-node2 podman[17845]: 2024-12-28 11:31:04.819170098 -0500 EST m=+0.063610667 container remove aad704eb4fecc412d41c3fc370bbeac6b4eadb09ac1cd38a0e954309102e8511 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Dec 28 11:31:04 managed-node2 podman[17845]: 2024-12-28 11:31:04.838450752 -0500 EST m=+0.082891320 container remove ac11033f777aa9d217c13c8f64bc54df311154923605ee127895f4bb01f345a5 (image=localhost/podman-pause:5.3.1-1733097600, name=d9899c53501b-infra, pod_id=d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a, io.buildah.version=1.38.0)
Dec 28 11:31:04 managed-node2 podman[17845]: 2024-12-28 11:31:04.846693042 -0500 EST m=+0.091133593 pod remove d9899c53501b599ec0ca4ce47a305ac0c0246057883ca370624160d2a1b7896a (image=, name=nopull)
Dec 28 11:31:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Dec 28 11:31:05 managed-node2 python3.12[17984]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:31:05 managed-node2 python3.12[18115]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:31:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
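The teardown above (pod stop/remove, kube file deletion, image prune) corresponds to tasks of roughly this shape, mirroring the Invoked lines in the journal (a sketch, not the role's literal task files):

    - name: Remove the pod and containers defined by the kube file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent

    - name: Remove the kube file itself
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent

    - name: Prune images left unused by the removal
      ansible.builtin.command: podman image prune -f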
Dec 28 11:31:09 managed-node2 python3.12[18385]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:31:11 managed-node2 python3.12[18522]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:31:14 managed-node2 python3.12[18655]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:31:15 managed-node2 python3.12[18787]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Dec 28 11:31:16 managed-node2 python3.12[18920]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Dec 28 11:31:17 managed-node2 python3.12[19053]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Dec 28 11:31:19 managed-node2 python3.12[19184]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:31:20 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files...
░░ Subject: A start job for unit logrotate.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit logrotate.service has begun execution.
░░
░░ The job identifier is 1825.
Dec 28 11:31:20 managed-node2 systemd[1]: logrotate.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit logrotate.service has successfully entered the 'dead' state.
Dec 28 11:31:20 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files.
░░ Subject: A start job for unit logrotate.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit logrotate.service has finished successfully.
░░
░░ The job identifier is 1825.
Dec 28 11:31:20 managed-node2 python3.12[19317]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:31:21 managed-node2 python3.12[19452]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Dec 28 11:31:23 managed-node2 python3.12[19612]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Dec 28 11:31:24 managed-node2 python3.12[19743]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Dec 28 11:31:29 managed-node2 python3.12[19874]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:31:30 managed-node2 python3.12[20007]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:31:31 managed-node2 python3.12[20139]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Dec 28 11:31:32 managed-node2 python3.12[20272]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:31:32 managed-node2 python3.12[20405]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Dec 28 11:31:32 managed-node2 python3.12[20405]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml
Dec 28 11:31:32 managed-node2 podman[20412]: 2024-12-28 11:31:32.959275485 -0500 EST m=+0.025989496 pod stop 58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47 (image=, name=bogus)
Dec 28 11:31:32 managed-node2 systemd[1]: Removed slice machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice - cgroup machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice.
░░ Subject: A stop job for unit machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit machine-libpod_pod_58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47.slice has finished.
░░
░░ The job identifier is 1903 and the job result is done.
Dec 28 11:31:32 managed-node2 podman[20412]: 2024-12-28 11:31:32.992977045 -0500 EST m=+0.059691023 container remove f0e9e0de6186a585e9881b1d2dfc0ec794232fd2dc60a6632875b53a67f604ff (image=localhost/podman-pause:5.3.1-1733097600, name=58826ba81214-infra, pod_id=58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47, io.buildah.version=1.38.0)
Dec 28 11:31:33 managed-node2 podman[20412]: 2024-12-28 11:31:33.00176059 -0500 EST m=+0.068474556 pod remove 58826ba81214282b6376d90c8e6eb337d6ff18a73fea5b0f082e4d35c8ae1d47 (image=, name=bogus)
Dec 28 11:31:33 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Dec 28 11:31:33 managed-node2 python3.12[20552]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:31:34 managed-node2 python3.12[20683]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:31:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
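The systemd-escape calls above compute the instance name used with the podman-kube@.service template: '/' in the path becomes '-' and other special characters become \x-escapes, which is why the '-' inside "ansible-kubernetes.d" shows up as "\x2d" in the unit names. As a task, that step looks roughly like this (the register variable name is illustrative):

    - name: Compute the podman-kube@.service unit name for a kube file
      ansible.builtin.command: systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml
      register: __kube_unit_name   # illustrative name, not from the log
      changed_when: false
      # stdout, per the journal: podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service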
Dec 28 11:31:37 managed-node2 python3.12[20952]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:31:38 managed-node2 python3.12[21089]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:31:42 managed-node2 python3.12[21222]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:31:43 managed-node2 python3.12[21354]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Dec 28 11:31:44 managed-node2 python3.12[21487]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Dec 28 11:31:45 managed-node2 python3.12[21620]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Dec 28 11:31:47 managed-node2 python3.12[21751]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:31:48 managed-node2 python3.12[21883]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Dec 28 11:31:49 managed-node2 python3.12[22015]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Dec 28 11:31:51 managed-node2 python3.12[22175]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Dec 28 11:31:52 managed-node2 python3.12[22306]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Dec 28 11:31:57 managed-node2 python3.12[22437]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Dec 28 11:31:57 managed-node2 python3.12[22569]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:31:58 managed-node2 python3.12[22702]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:31:58 managed-node2 python3.12[22834]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:32:00 managed-node2 python3.12[22966]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:32:00 managed-node2 python3.12[23098]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None
Dec 28 11:32:00 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001.
░░ Subject: A start job for unit user-3001.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-3001.slice has finished successfully.
░░
░░ The job identifier is 1983.
Dec 28 11:32:00 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001...
░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@3001.service has begun execution.
░░
░░ The job identifier is 1905.
Dec 28 11:32:00 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001.
░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@3001.service has finished successfully.
░░
░░ The job identifier is 1905.
Dec 28 11:32:00 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001...
░░ Subject: A start job for unit user@3001.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@3001.service has begun execution.
░░
░░ The job identifier is 1985.
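Enabling linger is what allows rootless pods for podman_basic_user to keep running without an open login session; user@3001.service (started just above) then hosts that user's systemd manager. The creates= guard makes the command idempotent, since loginctl drops a per-user flag file. A sketch matching the Invoked line:

    - name: Keep the systemd user instance for podman_basic_user running after logout
      ansible.builtin.command: loginctl enable-linger podman_basic_user
      args:
        creates: /var/lib/systemd/linger/podman_basic_user   # flag file created by loginctl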
Dec 28 11:32:00 managed-node2 systemd-logind[656]: New session 6 of user podman_basic_user.
░░ Subject: A new session 6 has been created for user podman_basic_user
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 6 has been created for the user podman_basic_user.
░░
░░ The leading process of the session is 23102.
Dec 28 11:32:00 managed-node2 (systemd)[23102]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0)
Dec 28 11:32:01 managed-node2 systemd[23102]: Queued start job for default target default.target.
Dec 28 11:32:01 managed-node2 systemd[23102]: Created slice app.slice - User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 6.
Dec 28 11:32:01 managed-node2 systemd[23102]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 12.
Dec 28 11:32:01 managed-node2 systemd[23102]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 11.
Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target paths.target - Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 3.
Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target timers.target - Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 10.
Dec 28 11:32:01 managed-node2 systemd[23102]: Starting dbus.socket - D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 9.
Dec 28 11:32:01 managed-node2 systemd[23102]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 5.
Dec 28 11:32:01 managed-node2 systemd[23102]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 5.
Dec 28 11:32:01 managed-node2 systemd[23102]: Listening on dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 9.
Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target sockets.target - Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 8.
Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2.
Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target default.target - Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1.
Dec 28 11:32:01 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001.
░░ Subject: A start job for unit user@3001.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@3001.service has finished successfully.
░░
░░ The job identifier is 1985.
Dec 28 11:32:01 managed-node2 systemd[23102]: Startup finished in 75ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The user manager instance for user 3001 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░
░░ Startup of the manager took 75683 microseconds.
Dec 28 11:32:01 managed-node2 python3.12[23248]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:32:02 managed-node2 python3.12[23379]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:32:02 managed-node2 sudo[23552]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rgdhlmjoemtzzjbyxxchxyctylwqhfnq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403522.317728-14463-56258380011442/AnsiballZ_podman_image.py'
Dec 28 11:32:02 managed-node2 sudo[23552]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23552) opened.
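The sudo line above shows the pattern used to run podman modules rootless: become the target user and point XDG_RUNTIME_DIR at that user's runtime directory so podman talks to the user's own systemd instance, storage, and session bus. Roughly (the image is illustrative; the actual module arguments are not logged here):

    - name: Run a podman module as the rootless user
      containers.podman.podman_image:
        name: quay.io/libpod/testimage:20210610   # illustrative image, not from the log
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001           # must match the user's UID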
Dec 28 11:32:02 managed-node2 sudo[23552]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)
Dec 28 11:32:02 managed-node2 systemd[23102]: Created slice session.slice - User Core Session Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 14.
Dec 28 11:32:02 managed-node2 systemd[23102]: Starting dbus-broker.service - D-Bus User Message Bus...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 13.
Dec 28 11:32:02 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Dec 28 11:32:02 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Dec 28 11:32:02 managed-node2 systemd[23102]: Started dbus-broker.service - D-Bus User Message Bus.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 13.
Dec 28 11:32:02 managed-node2 dbus-broker-launch[23576]: Ready
Dec 28 11:32:02 managed-node2 systemd[23102]: Created slice user.slice - Slice /user.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 20.
Dec 28 11:32:02 managed-node2 systemd[23102]: Started podman-23563.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 19.
Dec 28 11:32:02 managed-node2 systemd[23102]: Started podman-pause-c9301dca.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 23.
Dec 28 11:32:02 managed-node2 systemd[23102]: Started podman-23578.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 27.
Dec 28 11:32:04 managed-node2 systemd[23102]: Started podman-23604.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 31.
Dec 28 11:32:04 managed-node2 sudo[23552]: pam_unix(sudo:session): session closed for user podman_basic_user
Dec 28 11:32:05 managed-node2 python3.12[23742]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:32:05 managed-node2 python3.12[23873]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:32:06 managed-node2 python3.12[24004]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 28 11:32:06 managed-node2 python3.12[24109]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403525.7853103-14616-209831906785104/.source.yml _original_basename=.h8subroh follow=False checksum=16d1b24f2942ab6f5b4341d181237c71d76d7322 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:32:06 managed-node2 sudo[24282]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ogbxjrmnbrgzzxjfhubyswxitrvrggtx ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403526.5291655-14658-207237353690412/AnsiballZ_podman_play.py'
Dec 28 11:32:06 managed-node2 sudo[24282]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24282) opened.
Dec 28 11:32:06 managed-node2 sudo[24282]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)
Dec 28 11:32:06 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Dec 28 11:32:07 managed-node2 systemd[23102]: Started podman-24292.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 35.
Dec 28 11:32:07 managed-node2 systemd[23102]: Created slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice - cgroup user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 39.
Dec 28 11:32:07 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6
Dec 28 11:32:07 managed-node2 systemd[23102]: Started rootless-netns-405bf8df.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 43.
Dec 28 11:32:07 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.
Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered blocking state
Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered disabled state
Dec 28 11:32:07 managed-node2 kernel: veth0: entered allmulticast mode
Dec 28 11:32:07 managed-node2 kernel: veth0: entered promiscuous mode
Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered blocking state
Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state
Dec 28 11:32:07 managed-node2 systemd[23102]: Started run-rf8a740498c1244b69bf9eca2f69146fa.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 47.
Dec 28 11:32:07 managed-node2 aardvark-dns[24376]: starting aardvark on a child with pid 24377
Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Successfully parsed config
Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]}
Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Listen v6 ip {}
Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53]
Dec 28 11:32:07 managed-node2 conmon[24392]: conmon ee4d55c4ad439a2f244f : failed to write to /proc/self/oom_score_adj: Permission denied
Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-conmon-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 51.
Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}
Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : terminal_ctrl_fd: 14
Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : winsz read side: 17, winsz write side: 18
Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 56.
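At this point podman has set up rootless networking for the pod: the podman1 bridge and veth0 inside a rootless network namespace, with aardvark-dns answering DNS on 10.89.0.1 for "podman-default-kube-network". One way to examine that network after the fact (a sketch; run as the rootless user, following the same become pattern shown earlier):

    - name: Inspect the network created for kube play
      ansible.builtin.command: podman network inspect podman-default-kube-network
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001
      changed_when: false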
Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : container PID: 24395
Dec 28 11:32:07 managed-node2 conmon[24397]: conmon 03382f1a259308bdd4d4 : failed to write to /proc/self/oom_score_adj: Permission denied
Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 61.
Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}
Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : terminal_ctrl_fd: 13
Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : winsz read side: 16, winsz write side: 17
Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 66.
Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : container PID: 24400
Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d Container: 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2
Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
time="2024-12-28T11:32:06-05:00" level=info msg="/bin/podman filtering at log level debug"
time="2024-12-28T11:32:06-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)"
time="2024-12-28T11:32:06-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
time="2024-12-28T11:32:06-05:00" level=info msg="Using sqlite as database backend"
time="2024-12-28T11:32:06-05:00" level=debug msg="systemd-logind: Unknown object '/'."
time="2024-12-28T11:32:06-05:00" level=debug msg="Using graph driver overlay" time="2024-12-28T11:32:06-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2024-12-28T11:32:06-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2024-12-28T11:32:06-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2024-12-28T11:32:06-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2024-12-28T11:32:06-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2024-12-28T11:32:06-05:00" level=debug msg="Using transient store: false" time="2024-12-28T11:32:06-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2024-12-28T11:32:06-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2024-12-28T11:32:06-05:00" level=debug msg="Initializing event backend file" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-28T11:32:06-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-28T11:32:07-05:00" level=debug msg="Successfully loaded 1 networks" time="2024-12-28T11:32:07-05:00" level=debug msg="found free device name podman1" time="2024-12-28T11:32:07-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" 
time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="FROM \"scratch\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2024-12-28T11:32:07-05:00" level=debug msg="Check for idmapped mounts support " time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c76,c206\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container ID: 0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f" time="2024-12-28T11:32:07-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2024-12-28T11:32:07-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2024-12-28T11:32:07-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2024-12-28T11:32:07-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT 
[\"/catatonit\", \"-P\"]}" time="2024-12-28T11:32:07-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-28T11:32:07-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-28T11:32:07-05:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2024-12-28T11:32:07-05:00" level=debug msg="layer list: [\"361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa\"]" time="2024-12-28T11:32:07-05:00" level=debug msg="using \"/var/tmp/buildah1759970581\" to hold temporary data" time="2024-12-28T11:32:07-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/diff" time="2024-12-28T11:32:07-05:00" level=debug msg="layer \"361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2024-12-28T11:32:07-05:00" level=debug msg="OCIv1 config = {\"created\":\"2024-12-28T16:32:07.181184277Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-28T16:32:07.152309955Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-28T16:32:07.184345282Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-28T11:32:07-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2024-12-28T11:32:07-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2024-12-28T16:32:07.181184277Z\",\"container\":\"0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-28T16:32:07.152309955Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-28T16:32:07.184345282Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-28T11:32:07-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:0d3ba54be8a6a485e97578cafe10afb2547d3ae0763800b2e1e4bf045053253c\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2024-12-28T11:32:07-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2024-12-28T11:32:07-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2024-12-28T11:32:07-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2024-12-28T11:32:07-05:00" level=debug msg=" Requirement 0: allowed" time="2024-12-28T11:32:07-05:00" level=debug msg="Overall: allowed" time="2024-12-28T11:32:07-05:00" level=debug msg="start reading config" time="2024-12-28T11:32:07-05:00" level=debug msg="finished reading config" time="2024-12-28T11:32:07-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2024-12-28T11:32:07-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2024-12-28T11:32:07-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2024-12-28T11:32:07-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-28T11:32:07-05:00" level=debug msg="No compression detected" time="2024-12-28T11:32:07-05:00" level=debug msg="Using original blob without modification" time="2024-12-28T11:32:07-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2024-12-28T11:32:07-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-28T11:32:07-05:00" level=debug msg="No compression detected" time="2024-12-28T11:32:07-05:00" level=debug msg="Compression change for blob sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2024-12-28T11:32:07-05:00" level=debug msg="Using original blob without modification" time="2024-12-28T11:32:07-05:00" level=debug msg="setting image creation date to 2024-12-28 16:32:07.181184277 +0000 UTC" time="2024-12-28T11:32:07-05:00" level=debug msg="created new image ID \"428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\" with metadata \"{}\"" time="2024-12-28T11:32:07-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-28T11:32:07-05:00" level=debug msg="printing final image id \"428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice for parent user.slice and name libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
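The records above show Podman assembling its rootless pause image on the fly: buildah starts FROM scratch, copies catatonit in from the host, sets the entrypoint, commits the single layer, and tags the result as localhost/podman-pause:5.3.1-1733097600, which is why the re-lookup around this point now resolves to image ID 428ac094... . A minimal sketch of the same build done by hand with buildah (the pause-demo tag is illustrative, not from this run):

    # Start an empty working container, mirroring the FROM "scratch" step above
    ctr=$(buildah from scratch)
    # COPY /usr/libexec/podman/catatonit /catatonit, as in the parsed COPY step
    buildah copy "$ctr" /usr/libexec/podman/catatonit /catatonit
    # ENTRYPOINT ["/catatonit", "-P"], as in the parsed ENTRYPOINT step
    buildah config --entrypoint '["/catatonit", "-P"]' "$ctr"
    # Commit the layer as an image and remove the working container
    buildah commit "$ctr" localhost/pause-demo
    buildah rm "$ctr"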
time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980" time="2024-12-28T11:32:07-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:07-05:00" level=debug msg="setting container name 7a1a570a1a21-infra" time="2024-12-28T11:32:07-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network f77e9465d7924639ca311fa0051f20b3a5be9d142b3a20f85bf2c970144501c8 bridge podman1 2024-12-28 11:32:07.00472767 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-28T11:32:07-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-28T11:32:07-05:00" level=debug msg="Allocated lock 1 for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\" has run directory \"/run/user/3001/containers/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:07-05:00" level=debug msg="adding container to pod httpd1" time="2024-12-28T11:32:07-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2024-12-28T11:32:07-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:07-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /proc" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /dev" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /sys" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-28T11:32:07-05:00" level=debug msg="Allocated lock 2 for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\" has run directory \"/run/user/3001/containers/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Strongconnecting node ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="Pushed ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b onto stack" time="2024-12-28T11:32:07-05:00" level=debug msg="Finishing node ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b. Popped ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b off stack" time="2024-12-28T11:32:07-05:00" level=debug msg="Strongconnecting node 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="Pushed 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 onto stack" time="2024-12-28T11:32:07-05:00" level=debug msg="Finishing node 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2. 
Popped 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 off stack" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/G7M3WBATRW5IXUOIAED7NGBVIG,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c860,c1021\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-e02ac7b3-b1af-7c3d-8942-5b7b98bce497 for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="Mounted container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/merged\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created root filesystem for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b at /home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/merged" time="2024-12-28T11:32:07-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2024-12-28T11:32:07-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2024-12-28T11:32:07-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
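Rootless networking is set up here in two stages: Podman creates a per-user network namespace and launches pasta with the logged arguments to connect it to the outside world (169.254.1.1 as the DNS forwarder, 169.254.1.2 as the address for reaching the host), and then netavark, in the records below, wires the podman1 bridge with the 10.89.0.0/24 subnet inside that namespace. To confirm the backends in play on a similar host:

    podman info --format '{{.Host.NetworkBackend}}'     # expected: netavark
    podman network inspect podman-default-kube-network  # bridge podman1, subnet 10.89.0.0/24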
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_f77e9465_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "a2:dd:67:c6:99:cc", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"Starting parent driver\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport911175231/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport911175231/.bp.sock]\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport is ready" time="2024-12-28T11:32:07-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-28T11:32:07-05:00" level=debug msg="Setting Cgroups for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b to user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice:libpod:ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:07-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/merged\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created OCI spec for container 
ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/config.json" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice for parent user.slice and name libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:07-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b -u ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata -p /run/user/3001/containers/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/pidfile -n 7a1a570a1a21-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b]" time="2024-12-28T11:32:07-05:00" level=info msg="Running conmon under slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice and unitName libpod-conmon-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-28T11:32:07-05:00" 
level=debug msg="Received: 24395" time="2024-12-28T11:32:07-05:00" level=info msg="Got Conmon PID as 24393" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b in OCI runtime" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-28T11:32:07-05:00" level=debug msg="Starting container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b with command [/catatonit -P]" time="2024-12-28T11:32:07-05:00" level=debug msg="Started container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JLDUA4NZGI76ZRR5HOENKUUFPL,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c860,c1021\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Mounted container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/merged\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created root filesystem for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 at /home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/merged" time="2024-12-28T11:32:07-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-28T11:32:07-05:00" level=debug msg="Setting Cgroups for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 to user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice:libpod:03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:07-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-28T11:32:07-05:00" level=debug msg="Created OCI spec for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/config.json" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice for parent user.slice and name libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug 
msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:07-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 -u 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata -p /run/user/3001/containers/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2]" time="2024-12-28T11:32:07-05:00" level=info msg="Running conmon under slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice and unitName libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-28T11:32:07-05:00" level=debug msg="Received: 24400" time="2024-12-28T11:32:07-05:00" level=info msg="Got Conmon PID as 24398" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 in OCI runtime" time="2024-12-28T11:32:07-05:00" level=debug msg="Starting container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-28T11:32:07-05:00" level=debug msg="Started container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-12-28T11:32:07-05:00" level=debug msg="Shutting down engines" time="2024-12-28T11:32:07-05:00" level=info msg="Received shutdown.Stop(), 
terminating!" PID=24292 Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 28 11:32:07 managed-node2 sudo[24282]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:08 managed-node2 sudo[24574]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nbvwsesvdxwwxbvavqduinwrpbjqzjgm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403527.9234824-14730-128175966223340/AnsiballZ_systemd.py' Dec 28 11:32:08 managed-node2 sudo[24574]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24574) opened. Dec 28 11:32:08 managed-node2 sudo[24574]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:08 managed-node2 python3.12[24577]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:32:08 managed-node2 systemd[23102]: Reload requested from client PID 24578 ('systemctl')... Dec 28 11:32:08 managed-node2 systemd[23102]: Reloading... Dec 28 11:32:08 managed-node2 systemd[23102]: Reloading finished in 46 ms. Dec 28 11:32:08 managed-node2 sudo[24574]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:08 managed-node2 sudo[24760]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rsufcjaytzfuvsdvktjmvixagihcdkaj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403528.7678657-14768-215863873770949/AnsiballZ_systemd.py' Dec 28 11:32:08 managed-node2 sudo[24760]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24760) opened. Dec 28 11:32:09 managed-node2 sudo[24760]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:09 managed-node2 python3.12[24763]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 28 11:32:09 managed-node2 systemd[23102]: Reload requested from client PID 24766 ('systemctl')... Dec 28 11:32:09 managed-node2 systemd[23102]: Reloading... Dec 28 11:32:09 managed-node2 systemd[23102]: Reloading finished in 45 ms. Dec 28 11:32:09 managed-node2 sudo[24760]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:09 managed-node2 sudo[24948]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wjayerfcwttzlpplhdotmgqxfpzwtmsn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403529.5767055-14819-31138203494993/AnsiballZ_systemd.py' Dec 28 11:32:09 managed-node2 sudo[24948]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24948) opened. 
Dec 28 11:32:09 managed-node2 sudo[24948]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:10 managed-node2 python3.12[24951]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:32:10 managed-node2 systemd[23102]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 72. Dec 28 11:32:10 managed-node2 systemd[23102]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Dec 28 11:32:10 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice/libpod-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope/container/memory.events Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Received SIGHUP Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Successfully parsed config Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Listen v4 ip {} Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Listen v6 ip {} Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: No configuration found stopping the sever Dec 28 11:32:10 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:10 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Dec 28 11:32:10 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Dec 28 11:32:10 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b)" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=info msg="Using sqlite as database backend" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using graph driver overlay" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using transient store: false" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that native-diff is usable" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Initializing event backend file" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid 
argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=info msg="Setting parallel job count to 7" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b)" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Shutting down engines" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24964 Dec 28 11:32:20 managed-node2 podman[24954]: time="2024-12-28T11:32:20-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Dec 28 11:32:20 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : container 24400 exited with status 137 Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2)" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=info msg="Using sqlite as database backend" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using graph driver overlay" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using transient store: false" Dec 28 11:32:20 managed-node2 systemd[23102]: Stopping libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 85. Dec 28 11:32:20 managed-node2 systemd[23102]: Stopped libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Dec 28 11:32:20 managed-node2 systemd[23102]: Removed slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice - cgroup user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Dec 28 11:32:20 managed-node2 systemd[23102]: user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice: No such file or directory Dec 28 11:32:20 managed-node2 podman[24954]: Pods stopped: Dec 28 11:32:20 managed-node2 podman[24954]: 7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d Dec 28 11:32:20 managed-node2 podman[24954]: Pods removed: Dec 28 11:32:20 managed-node2 podman[24954]: 7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d Dec 28 11:32:20 managed-node2 podman[24954]: Secrets removed: Dec 28 11:32:20 managed-node2 podman[24954]: Volumes removed: Dec 28 11:32:20 managed-node2 systemd[23102]: Created slice user-libpod_pod_3b4388a2a9b762501c124a3661654120f9263a80f5f11456345e85d379c6d7fe.slice - cgroup user-libpod_pod_3b4388a2a9b762501c124a3661654120f9263a80f5f11456345e85d379c6d7fe.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. 
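The "Pods stopped / Pods removed" summary marks the handoff: the pod created by the earlier ad-hoc play is torn down (hence the SIGTERM-then-SIGKILL on httpd1-httpd1 ten seconds after the service started) and the systemd-managed invocation recreates it as pod 3b4388a2..., which is the behavior of kube play's --replace flag. On comparable systems the template unit should carry that flag in its ExecStart; it can be inspected with:

    systemctl --user cat podman-kube@.service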
Dec 28 11:32:20 managed-node2 systemd[23102]: Started libpod-1a70bb1a83df1295fd28d2153b789097b0f0ccfdacba4b75f6617110edd97254.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Dec 28 11:32:20 managed-node2 systemd[23102]: Started rootless-netns-15dce554.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:20 managed-node2 kernel: veth0: entered allmulticast mode Dec 28 11:32:20 managed-node2 kernel: veth0: entered promiscuous mode Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 28 11:32:20 managed-node2 systemd[23102]: Started run-r880a3ae287c74593b989382815608bfc.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Dec 28 11:32:20 managed-node2 systemd[23102]: Started libpod-1dca1fd830799e789626a48dcc08cc0c920b9abb4783f3320f176c42769859a3.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Dec 28 11:32:20 managed-node2 systemd[23102]: Started libpod-d9139396f2177088f4a474a9b9f8bf7a8a991165bfa36c5a8571244a1a47883f.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Dec 28 11:32:20 managed-node2 systemd[23102]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
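At this point the httpd1 workload is fully owned by the user service manager. A quick way to confirm the pod and its containers from the controller side, mirroring the sudo/XDG_RUNTIME_DIR pattern this log itself uses (illustrative; assumes the user's systemd instance is running, as in this test):

    sudo -u podman_basic_user sh -c 'XDG_RUNTIME_DIR=/run/user/3001 podman ps --pod'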
Dec 28 11:32:20 managed-node2 podman[24954]: Pod: Dec 28 11:32:20 managed-node2 podman[24954]: 3b4388a2a9b762501c124a3661654120f9263a80f5f11456345e85d379c6d7fe Dec 28 11:32:20 managed-node2 podman[24954]: Container: Dec 28 11:32:20 managed-node2 podman[24954]: d9139396f2177088f4a474a9b9f8bf7a8a991165bfa36c5a8571244a1a47883f Dec 28 11:32:20 managed-node2 sudo[24948]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:21 managed-node2 python3.12[25171]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 28 11:32:22 managed-node2 python3.12[25303]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:23 managed-node2 python3.12[25436]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:32:24 managed-node2 python3.12[25568]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:25 managed-node2 python3.12[25699]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:25 managed-node2 systemd[4481]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Dec 28 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:25 managed-node2 systemd[4481]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Dec 28 11:32:25 managed-node2 systemd[4481]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Dec 28 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:26 managed-node2 podman[25862]: 2024-12-28 11:32:26.37897644 -0500 EST m=+0.583326334 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:26 managed-node2 python3.12[26008]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:27 managed-node2 python3.12[26139]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:27 managed-node2 python3.12[26270]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:32:28 managed-node2 python3.12[26375]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403547.4831927-15664-184790741417584/.source.yml _original_basename=.z3l5zz8s follow=False checksum=b7bd0e32af83ded16af592ddf05292719e54426e backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 28 11:32:28 managed-node2 systemd[1]: Created slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice - cgroup machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice. ░░ Subject: A start job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished successfully. ░░ ░░ The job identifier is 2069. 
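The `systemd-escape --template` call logged above is how the role derives a unit name from the kube YAML path; the result is the instance name that the rest of this log enables and starts. Reproducing it by hand (the output shown is the unit name that appears further down in this log, not an invention):

    systemd-escape --template podman-kube@.service \
        /etc/containers/ansible-kubernetes.d/httpd2.yml
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service
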
Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.632800229 -0500 EST m=+0.069098346 container create 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.637189725 -0500 EST m=+0.073487908 pod create 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.663977731 -0500 EST m=+0.100275777 container create 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.639839913 -0500 EST m=+0.076138036 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:28 managed-node2 kernel: veth0: entered allmulticast mode Dec 28 11:32:28 managed-node2 kernel: veth0: entered promiscuous mode Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.6958] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7019] device (veth0): carrier: link connected Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7045] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7051] device (podman1): carrier: link connected Dec 28 11:32:28 managed-node2 (udev-worker)[26529]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:28 managed-node2 (udev-worker)[26530]: Network interface NamePolicy= disabled on kernel command line. 
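The create events above follow the usual `podman play kube` shape: the infra (pause) container is created first, then the pod record, then the workload container, all placed in one machine-libpod_pod_* slice. A quick after-the-fact check; `--ctr-names` is an existing `podman pod ps` flag and `InfraContainerID` is a field of the pod-inspect JSON (both assumed from podman's documented CLI, not from this log):

    podman pod ps --ctr-names
    podman pod inspect httpd2 --format '{{.InfraContainerID}}'
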
Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7571] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7648] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7771] device (podman1): Activation: starting connection 'podman1' (8a968ede-a335-4ce0-8911-ebaeb783f432) Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7785] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7788] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7794] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7802] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2076. Dec 28 11:32:28 managed-node2 systemd[1]: Started run-rcc96bc2b686642e3b1e916b7556e810c.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-rcc96bc2b686642e3b1e916b7556e810c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rcc96bc2b686642e3b1e916b7556e810c.scope has finished successfully. ░░ ░░ The job identifier is 2155. Dec 28 11:32:28 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2076. Dec 28 11:32:28 managed-node2 aardvark-dns[26555]: starting aardvark on a child with pid 26560 Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Successfully parsed config Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Listen v6 ip {} Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.8129] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.8131] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.8135] device (podman1): Activation: successful, device activated. 
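What the sequence above shows: netavark creates the podman1 bridge and its nftables chains, NetworkManager only assumes the device as an external connection (the unmanaged -> activated state walk), and aardvark-dns comes up as the resolver on 10.89.0.1 for the kube network. The network definition aardvark parsed can be dumped directly; the network name is taken from this log:

    podman network inspect podman-default-kube-network
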
Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope. ░░ Subject: A start job for unit libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully. ░░ ░░ The job identifier is 2161. Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : terminal_ctrl_fd: 13 Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : winsz read side: 17, winsz write side: 18 Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope - libcrun container. ░░ Subject: A start job for unit libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully. ░░ ░░ The job identifier is 2168. Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : container PID: 26572 Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.886829431 -0500 EST m=+0.323127569 container init 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.89009851 -0500 EST m=+0.326396555 container start 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope. ░░ Subject: A start job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully. ░░ ░░ The job identifier is 2175. Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : terminal_ctrl_fd: 12 Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : winsz read side: 16, winsz write side: 17 Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope - libcrun container. ░░ Subject: A start job for unit libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully. 
░░ ░░ The job identifier is 2182. Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : container PID: 26577 Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.944782138 -0500 EST m=+0.381080298 container init 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.950731488 -0500 EST m=+0.387029633 container start 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.955422911 -0500 EST m=+0.391720957 pod start 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 Container: 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-12-28T11:32:28-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2024-12-28T11:32:28-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-28T11:32:28-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-12-28T11:32:28-05:00" level=info msg="Using sqlite as database backend" time="2024-12-28T11:32:28-05:00" level=debug msg="Using graph driver overlay" time="2024-12-28T11:32:28-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Using run root /run/containers/storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2024-12-28T11:32:28-05:00" level=debug msg="Using tmp dir /run/libpod" time="2024-12-28T11:32:28-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2024-12-28T11:32:28-05:00" level=debug msg="Using transient store: false" time="2024-12-28T11:32:28-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2024-12-28T11:32:28-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for 
building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2024-12-28T11:32:28-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2024-12-28T11:32:28-05:00" level=debug msg="Initializing event backend journald" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-28T11:32:28-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-28T11:32:28-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 65c443db6f7e5a9e1793416c0c5d9e9890e40c9a51785023cf18dc8be7eae490 bridge podman1 2024-12-28 11:30:09.866191781 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-28T11:32:28-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice for parent machine.slice and name libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327" time="2024-12-28T11:32:28-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:28-05:00" level=debug msg="setting container name 1b721eddd0ec-infra" time="2024-12-28T11:32:28-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Allocated lock 1 for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\" has work directory \"/var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\" has run directory \"/run/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:28-05:00" level=debug msg="adding container to pod httpd2" time="2024-12-28T11:32:28-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2024-12-28T11:32:28-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:28-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /proc" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /dev" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /sys" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-28T11:32:28-05:00" level=debug msg="Allocated lock 2 for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\" has work directory \"/var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\" has run directory \"/run/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Strongconnecting node 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="Pushed 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f onto stack" time="2024-12-28T11:32:28-05:00" level=debug msg="Finishing node 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f. Popped 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f off stack" time="2024-12-28T11:32:28-05:00" level=debug msg="Strongconnecting node 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="Pushed 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd onto stack" time="2024-12-28T11:32:28-05:00" level=debug msg="Finishing node 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd. 
Popped 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd off stack" time="2024-12-28T11:32:28-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/GICPFZE4VD52VXKDKOFWFG6QWF,upperdir=/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/diff,workdir=/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c109,c131\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Made network namespace at /run/netns/netns-fe725b46-5a81-c356-183a-5f1b3e306c82 for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="Mounted container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\" at \"/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/merged\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Created root filesystem for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f at /var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/merged" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_65c443db_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "aa:f3:08:81:92:ab", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2024-12-28T11:32:28-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-28T11:32:28-05:00" level=debug msg="Setting Cgroups for container 
25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f to machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice:libpod:25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:28-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/merged\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Created OCI spec for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f at /var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/config.json" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice for parent machine.slice and name libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:28-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f -u 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata -p /run/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/pidfile -n 1b721eddd0ec-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f]" time="2024-12-28T11:32:28-05:00" level=info msg="Running conmon under slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice and unitName libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope" time="2024-12-28T11:32:28-05:00" level=debug msg="Received: 26572" time="2024-12-28T11:32:28-05:00" level=info msg="Got Conmon PID as 26570" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f in OCI runtime" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-28T11:32:28-05:00" level=debug msg="Starting container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f with command [/catatonit -P]" time="2024-12-28T11:32:28-05:00" level=debug msg="Started container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/QVFPLMREPESOATKWX6MZW2HMBA,upperdir=/var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/diff,workdir=/var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c109,c131\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Mounted container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\" at \"/var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/merged\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Created root filesystem for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd at /var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/merged" time="2024-12-28T11:32:28-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-28T11:32:28-05:00" level=debug msg="Setting Cgroups for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd to machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice:libpod:5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:28-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-28T11:32:28-05:00" level=debug msg="Created OCI spec for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd at /var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/config.json" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice for parent machine.slice and name libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup 
machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:28-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd -u 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata -p /run/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd]" time="2024-12-28T11:32:28-05:00" level=info msg="Running conmon under slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice and unitName libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope" time="2024-12-28T11:32:28-05:00" level=debug msg="Received: 26577" time="2024-12-28T11:32:28-05:00" level=info msg="Got Conmon PID as 26575" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd in OCI runtime" time="2024-12-28T11:32:28-05:00" level=debug msg="Starting container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-28T11:32:28-05:00" level=debug msg="Started container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug 
/etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-28T11:32:28-05:00" level=debug msg="Shutting down engines" time="2024-12-28T11:32:28-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26514 Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 28 11:32:29 managed-node2 python3.12[26709]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:32:29 managed-node2 systemd[1]: Reload requested from client PID 26710 ('systemctl') (unit session-5.scope)... Dec 28 11:32:29 managed-node2 systemd[1]: Reloading... Dec 28 11:32:29 managed-node2 systemd[1]: Reloading finished in 209 ms. Dec 28 11:32:30 managed-node2 python3.12[26896]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 28 11:32:30 managed-node2 systemd[1]: Reload requested from client PID 26899 ('systemctl') (unit session-5.scope)... Dec 28 11:32:30 managed-node2 systemd[1]: Reloading... Dec 28 11:32:30 managed-node2 systemd[1]: Reloading finished in 209 ms. Dec 28 11:32:31 managed-node2 python3.12[27085]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:32:31 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2267. Dec 28 11:32:31 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2189. Dec 28 11:32:31 managed-node2 podman[27089]: 2024-12-28 11:32:31.330130986 -0500 EST m=+0.026544264 pod stop 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:31 managed-node2 systemd[1]: libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has successfully entered the 'dead' state. 
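The three ansible-systemd invocations above (daemon_reload=True, then enabled=True, then state=started) map onto plain systemctl calls; done by hand they would be roughly the following, quoting the unit name because of the \x2d escape:

    systemctl daemon-reload
    systemctl enable 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    systemctl start 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
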
Dec 28 11:32:31 managed-node2 podman[27089]: 2024-12-28 11:32:31.359740965 -0500 EST m=+0.056154206 container died 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, io.buildah.version=1.38.0) Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Received SIGHUP Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Successfully parsed config Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Listen v4 ip {} Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Listen v6 ip {} Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: No configuration found stopping the server Dec 28 11:32:31 managed-node2 systemd[1]: run-rcc96bc2b686642e3b1e916b7556e810c.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rcc96bc2b686642e3b1e916b7556e810c.scope has successfully entered the 'dead' state. Dec 28 11:32:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:31 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Dec 28 11:32:31 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Dec 28 11:32:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f)" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Using sqlite as database backend" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using graph driver overlay" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using run root /run/containers/storage" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using tmp dir /run/libpod" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using transient store: false" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 28 11:32:31
managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Initializing event backend journald" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Setting parallel job count to 7" Dec 28 11:32:31 managed-node2 NetworkManager[780]: [1735403551.4070] device 
(podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Dec 28 11:32:31 managed-node2 systemd[1]: run-netns-netns\x2dfe725b46\x2d5a81\x2dc356\x2d183a\x2d5f1b3e306c82.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dfe725b46\x2d5a81\x2dc356\x2d183a\x2d5f1b3e306c82.mount has successfully entered the 'dead' state. Dec 28 11:32:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f-userdata-shm.mount has successfully entered the 'dead' state. Dec 28 11:32:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay-818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04-merged.mount has successfully entered the 'dead' state. Dec 28 11:32:31 managed-node2 podman[27089]: 2024-12-28 11:32:31.574954928 -0500 EST m=+0.271367877 container cleanup 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f)" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Shutting down engines" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27099 Dec 28 11:32:31 managed-node2 systemd[1]: libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 podman[27089]: time="2024-12-28T11:32:41-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Dec 28 11:32:41 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : container 26577 exited with status 137 Dec 28 11:32:41 managed-node2 systemd[1]: libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.357634219 -0500 EST m=+10.054047352 container died 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd)" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Using sqlite as database backend" Dec 28 11:32:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay-a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e-merged.mount has successfully entered the 'dead' state. 
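The SIGKILL warning above is expected: the test image's httpd ignores SIGTERM, so after the default 10-second grace period podman kills it, and conmon reports exit status 137 (128 + SIGKILL). For a workload that does handle SIGTERM, the grace period can be set in the kube YAML itself; an illustrative manifest (the demo's actual httpd2.yml is not shown in this log, and the mapping of this field to podman's stop timeout is assumed here):

apiVersion: v1
kind: Pod
metadata:
  name: httpd2
spec:
  # kube play treats this as the stop timeout before escalating to SIGKILL
  terminationGracePeriodSeconds: 30
  containers:
    - name: httpd2
      image: quay.io/libpod/testimage:20210610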
Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using graph driver overlay" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using run root /run/containers/storage" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using tmp dir /run/libpod" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using transient store: false" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Initializing event backend journald" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: 
time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Setting parallel job count to 7" Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.387589372 -0500 EST m=+10.084002321 container cleanup 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=27122 Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=27122 Dec 28 11:32:41 managed-node2 systemd[1]: Stopping libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope... ░░ Subject: A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has begun execution. ░░ ░░ The job identifier is 2275. Dec 28 11:32:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Completed shutdown handler \"libpod\", duration 0s" PID=27122 Dec 28 11:32:41 managed-node2 systemd[1]: libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 systemd[1]: Stopped libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope. 
░░ Subject: A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished. ░░ ░░ The job identifier is 2275 and the job result is done. Dec 28 11:32:41 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice - cgroup machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice. ░░ Subject: A stop job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished. ░░ ░░ The job identifier is 2274 and the job result is done. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.463051907 -0500 EST m=+10.159464865 container remove 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.483585524 -0500 EST m=+10.179998492 container remove 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 systemd[1]: machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice: Failed to open /run/systemd/transient/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice: No such file or directory Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.491008144 -0500 EST m=+10.187421090 pod remove 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:41 managed-node2 podman[27089]: Pods stopped: Dec 28 11:32:41 managed-node2 podman[27089]: 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 Dec 28 11:32:41 managed-node2 podman[27089]: Pods removed: Dec 28 11:32:41 managed-node2 podman[27089]: 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 Dec 28 11:32:41 managed-node2 podman[27089]: Secrets removed: Dec 28 11:32:41 managed-node2 podman[27089]: Volumes removed: Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.510438857 -0500 EST m=+10.206851804 container create 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 systemd[1]: Created slice 
machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice - cgroup machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice. ░░ Subject: A start job for unit machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice has finished successfully. ░░ ░░ The job identifier is 2276. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.550371276 -0500 EST m=+10.246784226 container create d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166 (image=localhost/podman-pause:5.3.1-1733097600, name=c97b04ddc09a-infra, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.554904013 -0500 EST m=+10.251317051 pod create c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e (image=, name=httpd2) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.579200826 -0500 EST m=+10.275613776 container create 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.579621257 -0500 EST m=+10.276034232 container restart 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.557275503 -0500 EST m=+10.253688533 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:41 managed-node2 systemd[1]: Started libpod-56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99.scope - libcrun container. ░░ Subject: A start job for unit libpod-56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99.scope has finished successfully. ░░ ░░ The job identifier is 2282. 
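The earlier burst of "Configured OCI runtime ... initialization failed: no valid executable found" debug messages is harmless probing: podman walks every runtime listed in its configuration and settles on the first binary actually installed, here /usr/bin/crun. One way to confirm the selection out of band (an illustrative task, not part of this test; the register name is hypothetical):

- name: Show which OCI runtime podman selected
  ansible.builtin.command:
    # !unsafe keeps Jinja2 from interpreting the Go template braces
    cmd: !unsafe podman info --format '{{.Host.OCIRuntime.Name}}'
  register: __oci_runtime
  changed_when: false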
Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.643666408 -0500 EST m=+10.340079707 container init 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.646444337 -0500 EST m=+10.342857482 container start 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6591] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:41 managed-node2 kernel: veth0: entered allmulticast mode Dec 28 11:32:41 managed-node2 kernel: veth0: entered promiscuous mode Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6690] device (podman1): carrier: link connected Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6700] device (veth0): carrier: link connected Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6704] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Dec 28 11:32:41 managed-node2 (udev-worker)[27143]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:41 managed-node2 (udev-worker)[27142]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7162] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7168] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7173] device (podman1): Activation: starting connection 'podman1' (02db41d1-c81c-4f7f-9817-7016876fce31) Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7194] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7197] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7198] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7202] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2289. 
Dec 28 11:32:41 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2289. Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7628] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7631] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7639] device (podman1): Activation: successful, device activated. Dec 28 11:32:41 managed-node2 systemd[1]: Started run-rbc08a3c205e64d89baf5e64d1be58554.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-rbc08a3c205e64d89baf5e64d1be58554.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rbc08a3c205e64d89baf5e64d1be58554.scope has finished successfully. ░░ ░░ The job identifier is 2368. Dec 28 11:32:41 managed-node2 systemd[1]: Started libpod-d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166.scope - libcrun container. ░░ Subject: A start job for unit libpod-d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166.scope has finished successfully. ░░ ░░ The job identifier is 2374. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.814913125 -0500 EST m=+10.511326328 container init d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166 (image=localhost/podman-pause:5.3.1-1733097600, name=c97b04ddc09a-infra, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.817516902 -0500 EST m=+10.513929917 container start d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166 (image=localhost/podman-pause:5.3.1-1733097600, name=c97b04ddc09a-infra, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 systemd[1]: Started libpod-3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec.scope - libcrun container. ░░ Subject: A start job for unit libpod-3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec.scope has finished successfully. ░░ ░░ The job identifier is 2381. 
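The podman1 bridge and veth churn, the NetworkManager "external" activation, and the aardvark-dns scope all belong to the network that kube play attaches pods to, by default named podman-default-kube-network (assuming no override in containers.conf). An illustrative check:

- name: Inspect the network used by kube play pods
  ansible.builtin.command:
    cmd: podman network inspect podman-default-kube-network
  changed_when: false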
Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.857040889 -0500 EST m=+10.553453868 container init 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.859993687 -0500 EST m=+10.556406630 container start 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.864704183 -0500 EST m=+10.561117145 pod start c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e (image=, name=httpd2) Dec 28 11:32:41 managed-node2 podman[27089]: Pod: Dec 28 11:32:41 managed-node2 podman[27089]: c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e Dec 28 11:32:41 managed-node2 podman[27089]: Container: Dec 28 11:32:41 managed-node2 podman[27089]: 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec Dec 28 11:32:41 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2189. 
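With httpd2 back up, the test repeats the same cycle for a second kube file, httpd3.yml. The escaped unit names used throughout come from systemd-escape, which the next entries show being invoked through the command module; roughly (register name hypothetical):

- name: Derive the templated unit name for a kube file
  ansible.builtin.command:
    cmd: systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml
  register: __kube_unit
  changed_when: false
  # stdout: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service
  # ("/" becomes "-", and a literal "-" becomes "\x2d")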
Dec 28 11:32:42 managed-node2 python3.12[27323]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:43 managed-node2 python3.12[27456]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:32:44 managed-node2 python3.12[27588]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:45 managed-node2 python3.12[27719]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:46 managed-node2 podman[27880]: 2024-12-28 11:32:46.66606436 -0500 EST m=+0.856555194 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:47 managed-node2 python3.12[28026]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:47 managed-node2 python3.12[28157]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:48 managed-node2 python3.12[28288]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:32:48 managed-node2 python3.12[28393]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403567.8478522-16492-120957427058681/.source.yml _original_basename=.alralwvc follow=False checksum=7d6c191d6b3239df2f07df7943d4f739ea9dd879 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:48 managed-node2 python3.12[28524]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 28 11:32:49 managed-node2 systemd[1]: Created slice machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice - cgroup machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice. ░░ Subject: A start job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished successfully. ░░ ░░ The job identifier is 2388. Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.041754705 -0500 EST m=+0.060646134 container create 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.046650062 -0500 EST m=+0.065541475 pod create 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.070972773 -0500 EST m=+0.089864372 container create 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.048760037 -0500 EST m=+0.067651582 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:49 managed-node2 kernel: veth1: entered allmulticast mode Dec 28 11:32:49 managed-node2 kernel: veth1: entered promiscuous mode Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 28 11:32:49 managed-node2 NetworkManager[780]: [1735403569.1130] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Dec 28 11:32:49 managed-node2 NetworkManager[780]: [1735403569.1143] device (veth1): carrier: link connected Dec 28 11:32:49 managed-node2 (udev-worker)[28547]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope. 
░░ Subject: A start job for unit libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully. ░░ ░░ The job identifier is 2395. Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope - libcrun container. ░░ Subject: A start job for unit libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully. ░░ ░░ The job identifier is 2402. Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.213656897 -0500 EST m=+0.232548524 container init 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.217547099 -0500 EST m=+0.236438575 container start 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope. ░░ Subject: A start job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully. ░░ ░░ The job identifier is 2409. Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope - libcrun container. ░░ Subject: A start job for unit libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully. ░░ ░░ The job identifier is 2416. 
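The podman_play invocation logged a few entries back corresponds to a task along these lines, showing only the parameters that appear in the journal (everything else is left at module defaults):

- name: Play the kube file so podman creates and starts the pod
  containers.podman.podman_play:
    kube_file: /etc/containers/ansible-kubernetes.d/httpd3.yml
    state: started
    executable: podman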
Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.277304737 -0500 EST m=+0.296196201 container init 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.279831494 -0500 EST m=+0.298723124 container start 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.284418309 -0500 EST m=+0.303309727 pod start 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:32:49 managed-node2 python3.12[28711]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:32:49 managed-node2 systemd[1]: Reload requested from client PID 28712 ('systemctl') (unit session-5.scope)... Dec 28 11:32:49 managed-node2 systemd[1]: Reloading... Dec 28 11:32:50 managed-node2 systemd[1]: Reloading finished in 221 ms. Dec 28 11:32:50 managed-node2 python3.12[28899]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 28 11:32:50 managed-node2 systemd[1]: Reload requested from client PID 28902 ('systemctl') (unit session-5.scope)... Dec 28 11:32:50 managed-node2 systemd[1]: Reloading... Dec 28 11:32:51 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 28 11:32:51 managed-node2 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab... ░░ Subject: A start job for unit fstrim.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has begun execution. ░░ ░░ The job identifier is 2423. Dec 28 11:32:51 managed-node2 systemd[1]: fstrim.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit fstrim.service has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab. ░░ Subject: A start job for unit fstrim.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has finished successfully. ░░ ░░ The job identifier is 2423. Dec 28 11:32:51 managed-node2 python3.12[29091]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:32:51 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... 
░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2501. Dec 28 11:32:51 managed-node2 podman[29095]: 2024-12-28 11:32:51.738851481 -0500 EST m=+0.024762631 pod stop 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:32:51 managed-node2 systemd[1]: libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 podman[29095]: 2024-12-28 11:32:51.76132352 -0500 EST m=+0.047234619 container died 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, io.buildah.version=1.38.0) Dec 28 11:32:51 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:51 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Dec 28 11:32:51 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Dec 28 11:32:51 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:51 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: run-netns-netns\x2d363d34ef\x2dd6a9\x2d26ce\x2d8aaf\x2da1ed375e8f80.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d363d34ef\x2dd6a9\x2d26ce\x2d8aaf\x2da1ed375e8f80.mount has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a-userdata-shm.mount has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay-24cbbe9b3e67065b5840bc0768118c6456d294ea9f22e71509544de0674a5376-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-24cbbe9b3e67065b5840bc0768118c6456d294ea9f22e71509544de0674a5376-merged.mount has successfully entered the 'dead' state. 
Dec 28 11:32:51 managed-node2 podman[29095]: 2024-12-28 11:32:51.838200317 -0500 EST m=+0.124111426 container cleanup 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:51 managed-node2 systemd[1]: libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 podman[29095]: time="2024-12-28T11:33:01-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Dec 28 11:33:01 managed-node2 systemd[1]: libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 conmon[28577]: conmon 60498560b41dfa16aeb6 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice/libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope/container/memory.events Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.780253993 -0500 EST m=+10.066165205 container died 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:33:01 managed-node2 systemd[1]: var-lib-containers-storage-overlay-748adaeccd2f2432451a853a5f336086cf212ebf7a14e44d566db9416d6c1044-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-748adaeccd2f2432451a853a5f336086cf212ebf7a14e44d566db9416d6c1044-merged.mount has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.81589406 -0500 EST m=+10.101805167 container cleanup 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:33:01 managed-node2 systemd[1]: libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 systemd[1]: Stopped libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope. 
░░ Subject: A stop job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished. ░░ ░░ The job identifier is 2587 and the job result is done. Dec 28 11:33:01 managed-node2 systemd[1]: Removed slice machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice - cgroup machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice. ░░ Subject: A stop job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished. ░░ ░░ The job identifier is 2586 and the job result is done. Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.874667873 -0500 EST m=+10.160578980 container remove 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.899163625 -0500 EST m=+10.185074743 container remove 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:33:01 managed-node2 systemd[1]: machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice: Failed to open /run/systemd/transient/machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice: No such file or directory Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.90814883 -0500 EST m=+10.194059937 pod remove 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:33:01 managed-node2 podman[29095]: Pods stopped: Dec 28 11:33:01 managed-node2 podman[29095]: 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 Dec 28 11:33:01 managed-node2 podman[29095]: Pods removed: Dec 28 11:33:01 managed-node2 podman[29095]: 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 Dec 28 11:33:01 managed-node2 podman[29095]: Secrets removed: Dec 28 11:33:01 managed-node2 podman[29095]: Volumes removed: Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.931505206 -0500 EST m=+10.217416321 container create 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:01 managed-node2 systemd[1]: Created slice machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice - cgroup machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice. 
░░ Subject: A start job for unit machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice has finished successfully. ░░ ░░ The job identifier is 2588. Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.96964967 -0500 EST m=+10.255560855 container create 4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457 (image=localhost/podman-pause:5.3.1-1733097600, name=00b5192a1328-infra, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.974886751 -0500 EST m=+10.260797856 pod create 00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39 (image=, name=httpd3) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.977143449 -0500 EST m=+10.263054644 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.99926819 -0500 EST m=+10.285179388 container create aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.999650548 -0500 EST m=+10.285561668 container restart 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 systemd[1]: Started libpod-0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4.scope - libcrun container. ░░ Subject: A start job for unit libpod-0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4.scope has finished successfully. ░░ ░░ The job identifier is 2594. 
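The stop/remove/recreate sequence just logged, in which pod 70c479aaa7d2... is torn down and a new pod with the same httpd3 name (00b5192a1328...) is created, is the replace semantics of the service's start command. Outside the unit, the equivalent invocation is roughly the following; consult the installed podman-kube@.service for the exact ExecStart:

- name: Re-play a kube file, replacing any existing pod of the same name
  ansible.builtin.command:
    cmd: podman kube play --replace /etc/containers/ansible-kubernetes.d/httpd3.yml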
Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.045936814 -0500 EST m=+10.331847983 container init 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.049529032 -0500 EST m=+10.335440423 container start 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:33:02 managed-node2 kernel: veth1: entered allmulticast mode Dec 28 11:33:02 managed-node2 kernel: veth1: entered promiscuous mode Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 28 11:33:02 managed-node2 NetworkManager[780]: [1735403582.0775] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Dec 28 11:33:02 managed-node2 NetworkManager[780]: [1735403582.0793] device (veth1): carrier: link connected Dec 28 11:33:02 managed-node2 (udev-worker)[29142]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:33:02 managed-node2 systemd[1]: Started libpod-4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457.scope - libcrun container. ░░ Subject: A start job for unit libpod-4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457.scope has finished successfully. ░░ ░░ The job identifier is 2601. Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.17052525 -0500 EST m=+10.456436441 container init 4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457 (image=localhost/podman-pause:5.3.1-1733097600, name=00b5192a1328-infra, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.173125408 -0500 EST m=+10.459036745 container start 4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457 (image=localhost/podman-pause:5.3.1-1733097600, name=00b5192a1328-infra, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 28 11:33:02 managed-node2 systemd[1]: Started libpod-aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b.scope - libcrun container. ░░ Subject: A start job for unit libpod-aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b.scope has finished successfully. ░░ ░░ The job identifier is 2608. 
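The kernel and NetworkManager lines above show the new infra container's veth end being enslaved to the podman1 bridge. On a comparable host the wiring can be confirmed with iproute2 and podman; a sketch (the default kube-play network name is an assumption, check podman network ls):

    ip -br link show type veth                            # veth1 and its link state
    bridge link show                                      # ports currently enslaved to podman1
    podman network inspect podman-default-kube-network    # network name assumed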
Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.214681625 -0500 EST m=+10.500592798 container init aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.217084923 -0500 EST m=+10.502996095 container start aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.222187238 -0500 EST m=+10.508098487 pod start 00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39 (image=, name=httpd3) Dec 28 11:33:02 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2501. Dec 28 11:33:02 managed-node2 podman[29095]: Pod: Dec 28 11:33:02 managed-node2 podman[29095]: 00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39 Dec 28 11:33:02 managed-node2 podman[29095]: Container: Dec 28 11:33:02 managed-node2 podman[29095]: aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b Dec 28 11:33:02 managed-node2 sudo[29347]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-htdnlxjuuhpujmowyetwqvegbdirbcnl ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403582.6765993-17118-157836660933400/AnsiballZ_command.py' Dec 28 11:33:02 managed-node2 sudo[29347]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29347) opened. Dec 28 11:33:02 managed-node2 sudo[29347]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:33:03 managed-node2 python3.12[29350]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:03 managed-node2 systemd[23102]: Started podman-29358.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. 
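The unwieldy unit name is simply the kube YAML path run through systemd's instance escaping, and the test then verifies pod state with the same podman command logged above. A sketch:

    systemd-escape --template=podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service
    podman pod inspect httpd3 --format '{{.State}}'    # presumably expected to print Running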
Dec 28 11:33:03 managed-node2 sudo[29347]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:33:03 managed-node2 python3.12[29498]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:03 managed-node2 python3.12[29637]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:04 managed-node2 sudo[29819]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nhkiqonpbohiabdlrjdubfbcitdnmczm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403584.0463102-17178-155139140028051/AnsiballZ_command.py' Dec 28 11:33:04 managed-node2 sudo[29819]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29819) opened. Dec 28 11:33:04 managed-node2 sudo[29819]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:33:04 managed-node2 python3.12[29822]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:04 managed-node2 sudo[29819]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:33:04 managed-node2 python3.12[29956]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:05 managed-node2 python3.12[30090]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:06 managed-node2 python3.12[30224]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:06 managed-node2 python3.12[30357]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget 
use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:06 managed-node2 python3.12[30488]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:07 managed-node2 python3.12[30620]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:07 managed-node2 python3.12[30751]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:10 managed-node2 python3.12[30925]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Dec 28 11:33:11 managed-node2 python3.12[31098]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:12 managed-node2 python3.12[31229]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:33:14 managed-node2 python3.12[31365]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:33:15 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Dec 28 11:33:15 managed-node2 dbus-broker-launch[23576]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Dec 28 11:33:15 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Dec 28 11:33:15 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 28 11:33:15 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 28 11:33:16 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request.
Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Dec 28 11:33:16 managed-node2 dbus-broker-launch[23576]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Dec 28 11:33:16 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 28 11:33:16 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 28 11:33:16 managed-node2 systemd[1]: Reload requested from client PID 31373 ('systemctl') (unit session-5.scope)... Dec 28 11:33:16 managed-node2 systemd[1]: Reloading... Dec 28 11:33:16 managed-node2 systemd[1]: Reloading finished in 226 ms. Dec 28 11:33:16 managed-node2 systemd[1]: Started run-rc1d5f444a1bd4661900e9c55fb0359db.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-rc1d5f444a1bd4661900e9c55fb0359db.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rc1d5f444a1bd4661900e9c55fb0359db.service has finished successfully. ░░ ░░ The job identifier is 2619. Dec 28 11:33:16 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2697. Dec 28 11:33:16 managed-node2 systemd[1]: Reload requested from client PID 31436 ('systemctl') (unit session-5.scope)... Dec 28 11:33:16 managed-node2 systemd[1]: Reloading... Dec 28 11:33:16 managed-node2 systemd[1]: Reloading finished in 356 ms. Dec 28 11:33:16 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Dec 28 11:33:17 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Dec 28 11:33:17 managed-node2 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2697. Dec 28 11:33:17 managed-node2 systemd[1]: run-rc1d5f444a1bd4661900e9c55fb0359db.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rc1d5f444a1bd4661900e9c55fb0359db.service has successfully entered the 'dead' state. Dec 28 11:33:17 managed-node2 python3.12[31630]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:18 managed-node2 python3.12[31761]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:19 managed-node2 python3.12[31892]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 28 11:33:19 managed-node2 systemd[1]: Reload requested from client PID 31895 ('systemctl') (unit session-5.scope)... Dec 28 11:33:19 managed-node2 systemd[1]: Reloading... Dec 28 11:33:19 managed-node2 systemd[1]: Reloading finished in 218 ms. Dec 28 11:33:19 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2775. Dec 28 11:33:19 managed-node2 (rtmonger)[31952]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Dec 28 11:33:19 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2775. 
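The certificate role drives certmonger over D-Bus; the closest command-line equivalent of the request logged next is getcert. A sketch, under the assumption that the role's ca=self-sign maps to the SelfSign CA nickname (verify with getcert list-cas):

    getcert request -w -c SelfSign \
        -f /etc/pki/tls/certs/quadlet_demo.crt \
        -k /etc/pki/tls/private/quadlet_demo.key \
        -D localhost    # CA nickname assumed; file paths and SAN match the role invocation below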
Dec 28 11:33:20 managed-node2 python3.12[32110]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 rsyslogd[655]: imjournal: journal files changed, reloading... 
[v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:20 managed-node2 certmonger[32126]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
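Once certmonger reports the certificate issued and saved, the tracking state and certificate contents can be spot-checked with standard tooling; a sketch:

    getcert list | grep -B2 -A6 quadlet_demo    # tracking entry, expected status MONITORING
    openssl x509 -in /etc/pki/tls/certs/quadlet_demo.crt -noout -subject -enddate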
Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:21 managed-node2 python3.12[32257]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 28 11:33:21 managed-node2 python3.12[32388]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Dec 28 11:33:22 managed-node2 python3.12[32519]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 28 11:33:22 managed-node2 python3.12[32650]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:22 managed-node2 certmonger[31952]: 2024-12-28 11:33:22 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:23 managed-node2 python3.12[32782]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:23 managed-node2 python3.12[32913]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:23 managed-node2 python3.12[33044]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:24 managed-node2 python3.12[33175]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:25 managed-node2 python3.12[33306]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:26 managed-node2 python3.12[33568]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:27 managed-node2 python3.12[33705]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 28 11:33:28 managed-node2 python3.12[33837]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:30 managed-node2 python3.12[33970]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:31 managed-node2 python3.12[34101]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:31 managed-node2 python3.12[34232]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:33:32 managed-node2 python3.12[34364]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 28 11:33:33 managed-node2 python3.12[34497]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 28 11:33:34 managed-node2 python3.12[34630]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 28 11:33:34 managed-node2 python3.12[34761]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 28 11:33:39 managed-node2 python3.12[35367]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:40 managed-node2 python3.12[35500]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:41 managed-node2 python3.12[35631]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:33:41 managed-node2 python3.12[35736]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403621.1117623-19088-239700523427720/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False 
checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:42 managed-node2 python3.12[35867]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:33:42 managed-node2 systemd[1]: Reload requested from client PID 35868 ('systemctl') (unit session-5.scope)... Dec 28 11:33:42 managed-node2 systemd[1]: Reloading... Dec 28 11:33:42 managed-node2 systemd[1]: Reloading finished in 227 ms. Dec 28 11:33:43 managed-node2 python3.12[36054]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:33:43 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2854. Dec 28 11:33:43 managed-node2 quadlet-demo-network[36058]: systemd-quadlet-demo Dec 28 11:33:43 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2854. Dec 28 11:33:44 managed-node2 python3.12[36196]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:46 managed-node2 python3.12[36329]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:46 managed-node2 python3.12[36460]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:33:47 managed-node2 python3.12[36565]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403626.645213-19345-73721816979131/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:47 managed-node2 python3.12[36696]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:33:47 managed-node2 systemd[1]: Reload requested from client PID 36697 ('systemctl') (unit session-5.scope)... Dec 28 11:33:47 managed-node2 systemd[1]: Reloading... 
Dec 28 11:33:48 managed-node2 systemd[1]: Reloading finished in 222 ms. Dec 28 11:33:48 managed-node2 python3.12[36883]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:33:48 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2938. Dec 28 11:33:48 managed-node2 podman[36887]: 2024-12-28 11:33:48.7119741 -0500 EST m=+0.024885300 volume create systemd-quadlet-demo-mysql Dec 28 11:33:48 managed-node2 quadlet-demo-mysql-volume[36887]: systemd-quadlet-demo-mysql Dec 28 11:33:48 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2938. Dec 28 11:33:49 managed-node2 python3.12[37026]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:50 managed-node2 python3.12[37159]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:58 managed-node2 podman[37298]: 2024-12-28 11:33:58.149897796 -0500 EST m=+6.600870142 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Dec 28 11:33:58 managed-node2 python3.12[37606]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:59 managed-node2 python3.12[37737]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:33:59 managed-node2 python3.12[37842]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403638.7432506-19770-169391064880553/.source.container _original_basename=.gnx8or29 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:59 managed-node2 python3.12[37973]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None 
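Quadlet prefixes generated network and volume names with systemd- unless the unit file overrides them, which is why the services echo systemd-quadlet-demo and systemd-quadlet-demo-mysql above. A quick existence check:

    podman network ls --format '{{.Name}}' | grep '^systemd-quadlet-demo$'
    podman volume inspect systemd-quadlet-demo-mysql --format '{{.Mountpoint}}'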
Dec 28 11:33:59 managed-node2 systemd[1]: Reload requested from client PID 37974 ('systemctl') (unit session-5.scope)... Dec 28 11:33:59 managed-node2 systemd[1]: Reloading... Dec 28 11:34:00 managed-node2 systemd[1]: Reloading finished in 221 ms. Dec 28 11:34:00 managed-node2 python3.12[38160]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:34:00 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 3022. Dec 28 11:34:00 managed-node2 podman[38164]: 2024-12-28 11:34:00.770004415 -0500 EST m=+0.045043734 container create 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.7952] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Dec 28 11:34:00 managed-node2 kernel: veth2: entered allmulticast mode Dec 28 11:34:00 managed-node2 kernel: veth2: entered promiscuous mode Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8108] device (veth2): carrier: link connected Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8120] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8132] device (podman2): carrier: link connected Dec 28 11:34:00 managed-node2 (udev-worker)[38180]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:34:00 managed-node2 (udev-worker)[38179]: Network interface NamePolicy= disabled on kernel command line. 
Dec 28 11:34:00 managed-node2 podman[38164]: 2024-12-28 11:34:00.750797382 -0500 EST m=+0.025836842 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8602] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8632] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8657] device (podman2): Activation: starting connection 'podman2' (171d65dc-0cba-40a2-bbb1-88ff2227a67f) Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8675] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8683] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8702] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8711] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 28 11:34:00 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3109. Dec 28 11:34:00 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3109. Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.9046] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.9052] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.9061] device (podman2): Activation: successful, device activated. Dec 28 11:34:00 managed-node2 systemd[1]: Started 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer - /usr/bin/podman healthcheck run 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca. ░░ Subject: A start job for unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer has finished successfully. ░░ ░░ The job identifier is 3188. 
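The transient <container-id>-<hash>.timer started above is how podman schedules a container's health check command when it runs under systemd. The check can also be fired by hand; a sketch using names from the log:

    podman healthcheck run quadlet-demo-mysql && echo healthy   # exit 0 means the check passed
    systemctl list-timers --all | grep 38908400944e             # the transient healthcheck timer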
Dec 28 11:34:00 managed-node2 podman[38164]: 2024-12-28 11:34:00.968366288 -0500 EST m=+0.243405744 container init 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 28 11:34:00 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 3022. Dec 28 11:34:01 managed-node2 podman[38164]: 2024-12-28 11:34:01.025873976 -0500 EST m=+0.300913418 container start 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 28 11:34:01 managed-node2 quadlet-demo-mysql[38164]: 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca Dec 28 11:34:01 managed-node2 podman[38228]: 2024-12-28 11:34:01.181978178 -0500 EST m=+0.138110250 container health_status 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 28 11:34:01 managed-node2 python3.12[38418]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:34:03 managed-node2 python3.12[38562]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:03 managed-node2 python3.12[38693]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:34:03 managed-node2 python3.12[38798]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403643.3933573-19976-127622001330519/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:04 managed-node2 python3.12[38953]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:34:04 managed-node2 systemd[1]: Reload requested from client PID 38954 ('systemctl') (unit session-5.scope)... Dec 28 11:34:04 managed-node2 systemd[1]: Reloading... Dec 28 11:34:04 managed-node2 systemd[1]: Reloading finished in 239 ms. 
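The health_status=healthy event above reflects the container's inspect state. The exact inspect key has moved between podman releases (newer versions expose .State.Health, older ones .State.Healthcheck), so a portable sketch tries both:

    podman inspect quadlet-demo-mysql --format '{{.State.Health.Status}}' 2>/dev/null \
        || podman inspect quadlet-demo-mysql --format '{{.State.Healthcheck.Status}}'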
Dec 28 11:34:05 managed-node2 python3.12[39141]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:34:07 managed-node2 python3.12[39302]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:07 managed-node2 python3.12[39442]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:34:08 managed-node2 python3.12[39547]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403647.2769017-20133-246311741527282/.source.yml _original_basename=.ssqbkyw_ follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:08 managed-node2 python3.12[39678]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:34:08 managed-node2 systemd[1]: Reload requested from client PID 39686 ('systemctl') (unit session-5.scope)... Dec 28 11:34:08 managed-node2 systemd[1]: Reloading... Dec 28 11:34:09 managed-node2 systemd[1]: Reloading finished in 229 ms. Dec 28 11:34:10 managed-node2 python3.12[39873]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:34:10 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
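Each file dropped into /etc/containers/systemd is followed by a daemon-reload because quadlet is a systemd generator: the corresponding units exist only after the generator re-runs. The generator can also be dry-run to validate the files before reloading; a sketch (the binary path varies by distribution):

    QUADLET_UNIT_DIRS=/etc/containers/systemd /usr/libexec/podman/quadlet -dryrun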
Dec 28 11:34:10 managed-node2 python3.12[40006]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Dec 28 11:34:11 managed-node2 python3.12[40161]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:12 managed-node2 python3.12[40292]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:23 managed-node2 podman[40432]: 2024-12-28 11:34:23.845732743 -0500 EST m=+11.306967134 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Dec 28 11:34:29 managed-node2 podman[40852]: 2024-12-28 11:34:29.041429179 -0500 EST m=+4.728776640 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Dec 28 11:34:29 managed-node2 python3.12[41119]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:29 managed-node2 python3.12[41250]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:34:30 managed-node2 python3.12[41355]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403669.5789778-20856-168524673727792/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:30 managed-node2 python3.12[41486]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:34:30 managed-node2 systemd[1]: Reload requested from client PID 41487 ('systemctl') (unit session-5.scope)... Dec 28 11:34:30 managed-node2 systemd[1]: Reloading... Dec 28 11:34:31 managed-node2 systemd[1]: Reloading finished in 233 ms. 
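quadlet-demo.kube is the top-level unit of the demo: it points podman kube play at quadlet-demo.yml, and its generated service carries whatever dependencies the .kube file declares on the network and mysql units. The synthesized unit can be inspected before it is started; a sketch:

    systemctl cat quadlet-demo.service                    # unit generated from quadlet-demo.kube
    systemctl list-dependencies --no-pager quadlet-demo.service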
Dec 28 11:34:31 managed-node2 podman[41652]: 2024-12-28 11:34:31.417261378 -0500 EST m=+0.139690391 container health_status 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:34:31 managed-node2 python3.12[41681]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Dec 28 11:34:31 managed-node2 systemd[1]: Starting quadlet-demo.service...
░░ Subject: A start job for unit quadlet-demo.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo.service has begun execution.
░░
░░ The job identifier is 3422.
Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Pods stopped:
Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Pods removed:
Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Secrets removed:
Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Volumes removed:
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.677835021 -0500 EST m=+0.030587276 volume create wp-pv-claim
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.769340308 -0500 EST m=+0.122092581 container create 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.775796735 -0500 EST m=+0.128549016 volume create envoy-proxy-config
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.781146214 -0500 EST m=+0.133898462 volume create envoy-certificates
Dec 28 11:34:31 managed-node2 systemd[1]: Created slice machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice - cgroup machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice.
░░ Subject: A start job for unit machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice has finished successfully.
░░
░░ The job identifier is 3509.
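The container health_status events for quadlet-demo-mysql, here and again at 11:35:02, come from a podman health check attached to that container; they arrive roughly 30 seconds apart. The quadlet-demo-mysql.container unit itself is not shown in this log, but a .container quadlet declares such a check along these lines (command and interval are assumptions):

  - name: Illustrative .container quadlet with a health check
    ansible.builtin.copy:
      dest: /etc/containers/systemd/quadlet-demo-mysql.container
      mode: "0644"
      content: |
        [Container]
        Image=quay.io/linux-system-roles/mysql:5.6
        ContainerName=quadlet-demo-mysql
        HealthCmd=/usr/bin/true    # placeholder check; the real command is not in this log
        HealthInterval=30s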
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.823653704 -0500 EST m=+0.176405950 container create 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.828087967 -0500 EST m=+0.180840227 pod create 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo)
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.862683945 -0500 EST m=+0.215436266 container create dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.8355441 -0500 EST m=+0.188296491 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.891170667 -0500 EST m=+0.243922983 container create e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.89164498 -0500 EST m=+0.244397237 container restart 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:31 managed-node2 systemd[23102]: Starting grub-boot-success.service - Mark boot as successful...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 118.
Dec 28 11:34:31 managed-node2 systemd[1]: Started libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope - libcrun container.
░░ Subject: A start job for unit libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope has finished successfully.
░░
░░ The job identifier is 3515.
Dec 28 11:34:31 managed-node2 systemd[23102]: Finished grub-boot-success.service - Mark boot as successful.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 118.
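The volume, pod, and container create events above are podman replaying the deployed Kubernetes YAML; only the object names survive in the log (podman names kube-play containers <pod>-<container>). A skeleton consistent with those events, with everything beyond the names assumed, would be:

  apiVersion: v1
  kind: PersistentVolumeClaim
  metadata:
    name: wp-pv-claim            # matches the "volume create wp-pv-claim" event
  ---
  apiVersion: v1
  kind: Pod
  metadata:
    name: quadlet-demo
  spec:
    containers:
      - name: wordpress          # becomes container quadlet-demo-wordpress
        image: quay.io/linux-system-roles/wordpress:4.8-apache
      - name: envoy              # becomes container quadlet-demo-envoy
        image: quay.io/linux-system-roles/envoyproxy:v1.25.0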
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.938438085 -0500 EST m=+0.291190514 container init 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.941536497 -0500 EST m=+0.294288848 container start 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.870177714 -0500 EST m=+0.222930103 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered blocking state
Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered disabled state
Dec 28 11:34:31 managed-node2 kernel: veth3: entered allmulticast mode
Dec 28 11:34:31 managed-node2 kernel: veth3: entered promiscuous mode
Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered blocking state
Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state
Dec 28 11:34:31 managed-node2 NetworkManager[780]: [1735403671.9773] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11)
Dec 28 11:34:31 managed-node2 NetworkManager[780]: [1735403671.9839] device (veth3): carrier: link connected
Dec 28 11:34:32 managed-node2 (udev-worker)[41715]: Network interface NamePolicy= disabled on kernel command line.
Dec 28 11:34:32 managed-node2 systemd[1]: Started libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope - libcrun container.
░░ Subject: A start job for unit libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope has finished successfully.
░░
░░ The job identifier is 3522.
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.061572321 -0500 EST m=+0.414324662 container init 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.064370179 -0500 EST m=+0.417122543 container start 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:32 managed-node2 systemd[1]: Started libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope - libcrun container.
░░ Subject: A start job for unit libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope has finished successfully.
░░
░░ The job identifier is 3529.
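The podman2 bridge and veth3 messages above show the pod's network namespace being wired into a quadlet-managed network; the quadlet-demo-network.service unit listed later by systemctl is generated from a .network quadlet. A sketch of such a unit follows, with subnet, gateway, and label values as pure assumptions:

  - name: Illustrative .network quadlet unit
    ansible.builtin.copy:
      dest: /etc/containers/systemd/quadlet-demo.network
      mode: "0644"
      content: |
        [Network]
        Subnet=192.168.30.0/24
        Gateway=192.168.30.1
        Label=app=wordpress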
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.20135626 -0500 EST m=+0.554108595 container init dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.204788464 -0500 EST m=+0.557540798 container start dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:32 managed-node2 systemd[1]: Started libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope - libcrun container.
░░ Subject: A start job for unit libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope has finished successfully.
░░
░░ The job identifier is 3536.
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.274240424 -0500 EST m=+0.626992728 container init e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.277076826 -0500 EST m=+0.629829199 container start e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.282279783 -0500 EST m=+0.635032096 pod start 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo)
Dec 28 11:34:32 managed-node2 systemd[1]: Started quadlet-demo.service.
░░ Subject: A start job for unit quadlet-demo.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo.service has finished successfully.
░░
░░ The job identifier is 3422.
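With the units generated, starting the stack is an ordinary service start; the ansible-systemd invocation logged at 11:34:31 corresponds roughly to this task (the name is illustrative). The get_url invocations that follow probe https://localhost:8000, the host port published for the envoy container:

  - name: Start the quadlet-generated service
    ansible.builtin.systemd:
      name: quadlet-demo.service
      scope: system
      state: started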
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: Volumes:
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: wp-pv-claim
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: Pod:
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: Containers:
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7
Dec 28 11:34:33 managed-node2 python3.12[41975]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:34:33 managed-node2 python3.12[42180]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:34:34 managed-node2 python3.12[42354]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:34:34 managed-node2 python3.12[42493]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:34:34 managed-node2 python3.12[42632]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:34:35 managed-node2 python3.12[42766]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:41 managed-node2 python3.12[42897]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:46 managed-node2 python3.12[43028]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:52 managed-node2 python3.12[43159]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:57 managed-node2 python3.12[43290]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:35:02 managed-node2 podman[43312]: 2024-12-28 11:35:02.210587379 -0500 EST m=+0.092731080 container health_status 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:35:03 managed-node2 python3.12[43438]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:35:08 managed-node2 python3.12[43569]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:35:08 managed-node2 python3.12[43700]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

TASK [Check] *******************************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
Saturday 28 December 2024 11:35:08 -0500 (0:00:00.449) 0:01:59.245 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.039441", "end": "2024-12-28 11:35:09.339841", "rc": 0, "start": "2024-12-28 11:35:09.300400" }

STDOUT:

CONTAINER ID  IMAGE                                            COMMAND              CREATED             STATUS                       PORTS                                                    NAMES
56d61cd416db  localhost/podman-pause:5.3.1-1733097600                               2 minutes ago       Up 2 minutes                                                                          001056497135-service
d288a7e88646  localhost/podman-pause:5.3.1-1733097600                               2 minutes ago       Up 2 minutes                 0.0.0.0:15002->80/tcp                                    c97b04ddc09a-infra
3b2e8426a107  quay.io/libpod/testimage:20210610                                     2 minutes ago       Up 2 minutes                 0.0.0.0:15002->80/tcp                                    httpd2-httpd2
0a5a42555773  localhost/podman-pause:5.3.1-1733097600                               2 minutes ago       Up 2 minutes                                                                          61ef7cf78877-service
4c761524f51e  localhost/podman-pause:5.3.1-1733097600                               2 minutes ago       Up 2 minutes                 0.0.0.0:15003->80/tcp                                    00b5192a1328-infra
aa8d45554bfc  quay.io/libpod/testimage:20210610                                     2 minutes ago       Up 2 minutes                 0.0.0.0:15003->80/tcp                                    httpd3-httpd3
38908400944e  quay.io/linux-system-roles/mysql:5.6             mysqld               About a minute ago  Up About a minute (healthy)  3306/tcp                                                 quadlet-demo-mysql
421487fe4389  localhost/podman-pause:5.3.1-1733097600                               37 seconds ago      Up 38 seconds                                                                         a96f3a51b8d1-service
6e505f6ec50b  localhost/podman-pause:5.3.1-1733097600                               37 seconds ago      Up 37 seconds                0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp           55132399920f-infra
dcf01e22c906  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  37 seconds ago      Up 37 seconds                0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp   quadlet-demo-wordpress
e1dba157116c  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  37 seconds ago      Up 37 seconds                0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp  quadlet-demo-envoy

TASK [Check pods] **************************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152
Saturday 28 December 2024 11:35:09 -0500 (0:00:00.406) 0:01:59.652 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.039133", "end": "2024-12-28 11:35:09.746506", "failed_when_result": false, "rc": 0, "start": "2024-12-28 11:35:09.707373" }

STDOUT:

POD ID        NAME          STATUS   CREATED         INFRA ID      IDS                                     NAMES                                                          STATUS
55132399920f  quadlet-demo  Running  37 seconds ago  6e505f6ec50b  6e505f6ec50b,dcf01e22c906,e1dba157116c  55132399920f-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
00b5192a1328  httpd3        Running  2 minutes ago   4c761524f51e  4c761524f51e,aa8d45554bfc               00b5192a1328-infra,httpd3-httpd3                               running,running
c97b04ddc09a  httpd2        Running  2 minutes ago   d288a7e88646  d288a7e88646,3b2e8426a107               c97b04ddc09a-infra,httpd2-httpd2                               running,running

TASK [Check systemd] ***********************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157
Saturday 28 December 2024 11:35:09 -0500 (0:00:00.404) 0:02:00.056 *****
ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.015237", "end": "2024-12-28 11:35:10.124286", "failed_when_result": false, "rc": 0, "start": "2024-12-28 11:35:10.109049" }

STDOUT:

quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service

TASK [LS] **********************************************************************
task path:
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165
Saturday 28 December 2024 11:35:10 -0500 (0:00:00.381) 0:02:00.438 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.004236", "end": "2024-12-28 11:35:10.494975", "failed_when_result": false, "rc": 0, "start": "2024-12-28 11:35:10.490739" }

STDOUT:

total 12
drwxr-xr-x.  5 root root   47 Dec 20 02:21 ../
lrwxrwxrwx.  1 root root   43 Dec 20 02:21 dbus.service -> /usr/lib/systemd/system/dbus-broker.service
drwxr-xr-x.  2 root root   32 Dec 20 02:21 getty.target.wants/
lrwxrwxrwx.  1 root root   37 Dec 20 02:21 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
drwxr-xr-x.  2 root root   48 Dec 20 02:22 network-online.target.wants/
lrwxrwxrwx.  1 root root   57 Dec 20 02:22 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x.  2 root root   76 Dec 20 02:22 timers.target.wants/
drwxr-xr-x.  2 root root   38 Dec 20 02:22 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
lrwxrwxrwx.  1 root root   41 Dec 20 02:25 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x.  2 root root   31 Dec 20 02:37 remote-fs.target.wants/
drwxr-xr-x.  2 root root  119 Dec 20 02:38 cloud-init.target.wants/
drwxr-xr-x.  2 root root 4096 Dec 20 02:38 sysinit.target.wants/
drwxr-xr-x.  2 root root  113 Dec 28 11:29 sockets.target.wants/
lrwxrwxrwx.  1 root root   41 Dec 28 11:29 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 12 root root 4096 Dec 28 11:32 ./
drwxr-xr-x.  2 root root  162 Dec 28 11:32 default.target.wants/
drwxr-xr-x.  2 root root 4096 Dec 28 11:33 multi-user.target.wants/

TASK [Cleanup] *****************************************************************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172
Saturday 28 December 2024 11:35:10 -0500 (0:00:00.366) 0:02:00.804 *****
included: fedora.linux_system_roles.podman for managed-node2

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 28 December 2024 11:35:10 -0500 (0:00:00.086) 0:02:00.891 *****
included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 28 December 2024 11:35:10 -0500 (0:00:00.086) 0:02:00.977 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 28 December 2024 11:35:10 -0500 (0:00:00.031) 0:02:01.020 *****
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path:
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 28 December 2024 11:35:10 -0500 (0:00:00.031) 0:02:01.052 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 28 December 2024 11:35:10 -0500 (0:00:00.035) 0:02:01.087 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 28 December 2024 11:35:10 -0500 (0:00:00.048) 0:02:01.136 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 28 December 2024 11:35:10 -0500 (0:00:00.049) 0:02:01.185 ***** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 28 December 2024 11:35:11 -0500 (0:00:00.108) 0:02:01.293 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 28 December 2024 11:35:11 -0500 (0:00:00.779) 0:02:02.073 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 28 December 2024 11:35:11 -0500 (0:00:00.032) 0:02:02.106 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 28 December 2024 11:35:11 -0500 (0:00:00.037) 0:02:02.143 ***** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 28 December 2024 11:35:11 -0500 (0:00:00.032) 0:02:02.176 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 28 December 2024 11:35:11 -0500 (0:00:00.033) 0:02:02.209 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 28 December 2024 11:35:11 -0500 (0:00:00.033) 0:02:02.243 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024739", "end": "2024-12-28 11:35:12.352008", "rc": 0, "start": "2024-12-28 11:35:12.327269" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.427) 0:02:02.671 ***** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.041) 0:02:02.712 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.039) 0:02:02.752 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.043) 0:02:02.795 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.039) 0:02:02.834 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.052) 0:02:02.887 ***** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.106) 0:02:02.994 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.095) 0:02:03.089 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.056) 0:02:03.146 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:35:12 -0500 (0:00:00.061) 0:02:03.208 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.070) 
0:02:03.278 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.408) 0:02:03.686 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.041) 0:02:03.728 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.036) 0:02:03.765 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.031) 0:02:03.797 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.033) 0:02:03.830 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.031) 0:02:03.862 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.032) 0:02:03.894 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.031) 0:02:03.926 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.037) 0:02:03.963 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.057) 0:02:04.021 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.086) 0:02:04.107 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.042) 0:02:04.149 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 28 December 2024 11:35:13 -0500 (0:00:00.037) 0:02:04.186 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.118) 0:02:04.305 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.033) 0:02:04.338 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.033) 0:02:04.372 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.086) 0:02:04.458 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.056) 0:02:04.515 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.045) 0:02:04.561 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.072) 0:02:04.633 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.037) 0:02:04.671 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.034) 0:02:04.705 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.029) 0:02:04.735 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.030) 0:02:04.765 ***** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.102) 0:02:04.868 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.079) 0:02:04.948 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.064) 0:02:05.012 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.051) 0:02:05.063 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.125) 0:02:05.189 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 28 December 2024 11:35:14 -0500 (0:00:00.054) 0:02:05.243 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 28 December 2024 11:35:15 -0500 (0:00:00.055) 0:02:05.299 ***** ok: 
[managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 28 December 2024 11:35:15 -0500 (0:00:00.828) 0:02:06.128 ***** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 28 December 2024 11:35:15 -0500 (0:00:00.084) 0:02:06.212 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 28 December 2024 11:35:16 -0500 (0:00:00.084) 0:02:06.297 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 28 December 2024 11:35:16 -0500 (0:00:00.056) 0:02:06.353 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 28 December 2024 11:35:16 -0500 (0:00:00.057) 0:02:06.410 ***** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 28 December 2024 11:35:16 -0500 (0:00:00.064) 0:02:06.475 ***** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:29:52 EST", "ActiveEnterTimestampMonotonic": "332814185", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket basic.target dbus-broker.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": 
"Sat 2024-12-28 11:29:51 EST", "AssertTimestampMonotonic": "332166798", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "637056000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:29:51 EST", "ConditionTimestampMonotonic": "332166794", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service shutdown.target ip6tables.service ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4592", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainHandoffTimestampMonotonic": "332196835", "ExecMainPID": "11035", "ExecMainStartTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainStartTimestampMonotonic": "332169481", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:29:51 EST", "InactiveExitTimestampMonotonic": "332170338", "InvocationID": "5e03e6ef9da5486cbe44b65fb67d7018", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11035", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2520236032", "MemoryCurrent": "35020800", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35287040", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", 
"ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:34:31 EST", "StateChangeTimestampMonotonic": "611239926", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 28 December 2024 11:35:16 -0500 (0:00:00.582) 0:02:07.057 ***** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:29:52 EST", "ActiveEnterTimestampMonotonic": "332814185", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service sysinit.target system.slice dbus.socket basic.target dbus-broker.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 
2024-12-28 11:29:51 EST", "AssertTimestampMonotonic": "332166798", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "637056000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:29:51 EST", "ConditionTimestampMonotonic": "332166794", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service shutdown.target ip6tables.service ipset.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4592", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainHandoffTimestampMonotonic": "332196835", "ExecMainPID": "11035", "ExecMainStartTimestamp": "Sat 2024-12-28 11:29:51 EST", "ExecMainStartTimestampMonotonic": "332169481", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:29:51 EST", "InactiveExitTimestampMonotonic": "332170338", "InvocationID": "5e03e6ef9da5486cbe44b65fb67d7018", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11035", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2526031872", "MemoryCurrent": "35020800", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35287040", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", 
"ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:34:31 EST", "StateChangeTimestampMonotonic": "611239926", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 28 December 2024 11:35:17 -0500 (0:00:00.581) 0:02:07.639 ***** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 28 December 2024 11:35:17 -0500 (0:00:00.042) 0:02:07.681 ***** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 28 December 2024 11:35:17 -0500 (0:00:00.034) 0:02:07.715 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 28 December 2024 11:35:17 -0500 (0:00:00.039) 0:02:07.755 ***** ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 28 December 2024 11:35:18 -0500 (0:00:01.105) 0:02:08.860 ***** skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 28 December 2024 11:35:18 -0500 (0:00:00.084) 0:02:08.944 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 28 December 2024 11:35:18 -0500 (0:00:00.070) 0:02:09.015 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 28 December 2024 11:35:18 -0500 (0:00:00.062) 0:02:09.077 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: 
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 28 December 2024 11:35:18 -0500 (0:00:00.043) 0:02:09.120 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 28 December 2024 11:35:18 -0500 (0:00:00.038) 0:02:09.159 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 28 December 2024 11:35:18 -0500 (0:00:00.047) 0:02:09.207 ***** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" }
TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.115) 0:02:09.322 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.052) 0:02:09.375 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false }
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.042) 0:02:09.418 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.033) 0:02:09.451 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.134) 0:02:09.495 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set
variables part 1] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.041) 0:02:09.629 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.041) 0:02:09.671 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.041) 0:02:09.712 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.053) 0:02:09.766 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.033) 0:02:09.800 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.031) 0:02:09.832 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.037) 0:02:09.869 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 28 December 2024 11:35:19 -0500 (0:00:00.039) 0:02:09.909 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true }
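Each pass through handle_secret.yml feeds one entry of the role's podman_secrets list to podman on the host; the actual names and data are hidden by no_log. A minimal sketch of the shape such an entry takes (the name and value below are hypothetical placeholders, not the censored values):

podman_secrets:
  - name: example-secret               # hypothetical; real names are censored above
    data: "{{ vault_example_value }}"  # hypothetical vault-backed value
    state: present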
"__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.042) 0:02:10.392 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.048) 0:02:10.441 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.062) 0:02:10.504 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.031) 0:02:10.535 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.076) 0:02:10.612 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.030) 0:02:10.642 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.031) 0:02:10.673 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.413) 0:02:11.087 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 28 December 2024 
11:35:20 -0500 (0:00:00.038) 0:02:11.125 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.041) 0:02:11.166 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:35:20 -0500 (0:00:00.053) 0:02:11.220 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.040) 0:02:11.260 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.031) 0:02:11.292 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.033) 0:02:11.325 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.031) 0:02:11.356 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.416) 0:02:11.773 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.031) 0:02:11.804 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) 
included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.239) 0:02:12.043 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.065) 0:02:12.109 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.058) 0:02:12.168 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:35:21 -0500 (0:00:00.051) 0:02:12.219 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false }
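The per-container variables above describe one quadlet spec: a file shipped from quadlet-demo.kube, of type kube, processed with state absent because this part of the play tears the demo back down. A minimal sketch of a matching role input, assuming the collection's documented podman_quadlet_specs interface:

podman_quadlet_specs:
  - file_src: quadlet-demo.kube   # the [Kube] unit shown in __podman_quadlet_str above
    state: absent                 # cleanup pass; a deploy pass would typically use started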
TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.078) 0:02:12.298 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.154) 0:02:12.452 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.053) 0:02:12.505 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.055) 0:02:12.561 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }
TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.066) 0:02:12.627 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.379) 0:02:13.007 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.033) 0:02:13.041 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
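Both getsubids checks are skipped here because __podman_user is root; rootless users need subuid/subgid ranges, and the role would query them roughly as below (a sketch assuming the shadow-utils getsubids CLI, not necessarily the role's exact tasks):

- name: Check subuid ranges for a rootless user
  ansible.builtin.command: getsubids {{ __podman_user }}
  changed_when: false

- name: Check subgid ranges (-g selects group ranges)
  ansible.builtin.command: getsubids -g {{ __podman_user }}
  changed_when: false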
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.031) 0:02:13.072 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.033) 0:02:13.106 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.032) 0:02:13.138 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.032) 0:02:13.171 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.031) 0:02:13.202 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:35:22 -0500 (0:00:00.034) 0:02:13.237 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.055) 0:02:13.292 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.084) 0:02:13.377 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false }
TASK
[fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.052) 0:02:13.429 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.054) 0:02:13.483 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.127) 0:02:13.611 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.066) 0:02:13.677 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.109) 0:02:13.787 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 28 December 2024 11:35:23 -0500 (0:00:00.096) 0:02:13.883 ***** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:34:32 EST", "ActiveEnterTimestampMonotonic": "612510959", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "-.mount quadlet-demo-mysql.service basic.target network-online.target systemd-journald.socket sysinit.target system.slice quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-28 11:34:31 EST", "AssertTimestampMonotonic": "611854518", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "269320000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", 
"CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:34:31 EST", "ConditionTimestampMonotonic": "611854514", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo.service", "ControlGroupId": "11607", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "41703", "ExecMainStartTimestamp": "Sat 2024-12-28 11:34:32 EST", "ExecMainStartTimestampMonotonic": "612510925", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2024-12-28 11:34:31 EST] ; stop_time=[n/a] ; pid=41693 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2024-12-28 11:34:31 EST] ; stop_time=[n/a] ; pid=41693 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", 
"IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:34:31 EST", "InactiveExitTimestampMonotonic": "611856509", "InvocationID": "5f9a5706541c4f68b5cc2e81eedcaa0c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "41703", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2509598720", "MemoryCurrent": "2957312", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "25952256", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", 
"RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target quadlet-demo-network.service -.mount quadlet-demo-mysql.service system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:34:32 EST", "StateChangeTimestampMonotonic": "612510959", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "4", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 28 December 2024 11:35:24 -0500 (0:00:01.249) 0:02:15.133 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403670.1612227, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "ctime": 1735403670.1672227, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 524288815, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1735403669.8852215, "nlink": 1, 
"path": "/etc/containers/systemd/quadlet-demo.kube", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 456, "uid": 0, "version": "2639070164", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 28 December 2024 11:35:25 -0500 (0:00:00.586) 0:02:15.719 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 28 December 2024 11:35:25 -0500 (0:00:00.055) 0:02:15.775 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 28 December 2024 11:35:25 -0500 (0:00:00.370) 0:02:16.145 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 28 December 2024 11:35:25 -0500 (0:00:00.049) 0:02:16.195 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 28 December 2024 11:35:25 -0500 (0:00:00.031) 0:02:16.227 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 28 December 2024 11:35:26 -0500 (0:00:00.032) 0:02:16.259 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.kube", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 28 December 2024 11:35:26 -0500 (0:00:00.376) 0:02:16.635 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 28 December 2024 11:35:27 -0500 (0:00:00.798) 0:02:17.433 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": 
false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 28 December 2024 11:35:27 -0500 (0:00:00.058) 0:02:17.491 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 28 December 2024 11:35:27 -0500 (0:00:00.075) 0:02:17.567 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 28 December 2024 11:35:27 -0500 (0:00:00.056) 0:02:17.623 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.703813", "end": "2024-12-28 11:35:28.426522", "rc": 0, "start": "2024-12-28 11:35:27.722709" } STDOUT: fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 28 December 2024 11:35:28 -0500 (0:00:01.114) 0:02:18.738 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:35:28 -0500 (0:00:00.060) 0:02:18.799 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:35:28 -0500 (0:00:00.036) 0:02:18.836 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:35:28 -0500 (0:00:00.034) 0:02:18.870 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 28 December 2024 11:35:28 -0500 (0:00:00.028) 0:02:18.899 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032409", "end": "2024-12-28 11:35:28.991117", 
"rc": 0, "start": "2024-12-28 11:35:28.958708" } STDOUT: localhost/podman-pause 5.3.1-1733097600 6c58b1756bb7 5 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 28 December 2024 11:35:29 -0500 (0:00:00.421) 0:02:19.320 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.029781", "end": "2024-12-28 11:35:29.443195", "rc": 0, "start": "2024-12-28 11:35:29.413414" } STDOUT: local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 28 December 2024 11:35:29 -0500 (0:00:00.433) 0:02:19.754 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.035575", "end": "2024-12-28 11:35:29.862458", "rc": 0, "start": "2024-12-28 11:35:29.826883" } STDOUT: 56d61cd416db localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 001056497135-service d288a7e88646 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp c97b04ddc09a-infra 3b2e8426a107 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 0a5a42555773 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 61ef7cf78877-service 4c761524f51e localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 00b5192a1328-infra aa8d45554bfc quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 38908400944e quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 28 December 2024 11:35:30 -0500 (0:00:00.515) 0:02:20.269 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027995", "end": "2024-12-28 11:35:30.416973", "rc": 0, "start": "2024-12-28 11:35:30.388978" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 28 December 2024 11:35:30 -0500 (0:00:00.476) 0:02:20.745 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 28 December 2024 11:35:30 -0500 (0:00:00.457) 0:02:21.203 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": 
false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 28 December 2024 11:35:31 -0500 (0:00:00.413) 0:02:21.617 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service": { "name": "38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service", "source": "systemd", "state": "stopped", "status": "transient" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": 
"dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, 
"podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": 
"running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": 
"systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { 
"name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:35:33 -0500 (0:00:02.344) 0:02:23.961 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:35:33 -0500 (0:00:00.062) 0:02:24.024 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: 
certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:35:33 -0500 (0:00:00.198) 0:02:24.222 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:35:34 -0500 (0:00:00.074) 0:02:24.296 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:35:34 -0500 (0:00:00.061) 0:02:24.358 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:35:34 -0500 (0:00:00.083) 0:02:24.441 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:35:34 -0500 (0:00:00.119) 0:02:24.561 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:35:34 -0500 (0:00:00.066) 0:02:24.627 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:35:34 -0500 (0:00:00.081) 0:02:24.709 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** 
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:35:34 -0500 (0:00:00.082) 0:02:24.791 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.460) 0:02:25.252 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.058) 0:02:25.311 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.134) 0:02:25.445 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.067) 0:02:25.513 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.062) 0:02:25.576 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.057) 0:02:25.633 ***** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.060) 0:02:25.694 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.057) 0:02:25.752 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.076) 0:02:25.828 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.120) 0:02:25.948 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.057) 0:02:26.006 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.045) 0:02:26.052 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:35:35 -0500 (0:00:00.091) 0:02:26.144 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:35:35 -0500 
(0:00:00.042) 0:02:26.186 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 28 December 2024 11:35:36 -0500 (0:00:00.085) 0:02:26.272 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 28 December 2024 11:35:36 -0500 (0:00:00.048) 0:02:26.320 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 28 December 2024 11:35:36 -0500 (0:00:00.063) 0:02:26.383 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403650.9881506, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "ctime": 1735403648.16514, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 469762265, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1735403647.7501385, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1605, "uid": 0, "version": "3684598483", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 28 December 2024 11:35:36 -0500 (0:00:00.509) 0:02:26.893 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 28 December 2024 11:35:36 -0500 (0:00:00.115) 0:02:27.008 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 28 December 2024 11:35:37 -0500 (0:00:00.396) 0:02:27.404 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] 
************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 28 December 2024 11:35:37 -0500 (0:00:00.073) 0:02:27.478 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 28 December 2024 11:35:37 -0500 (0:00:00.145) 0:02:27.624 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 28 December 2024 11:35:37 -0500 (0:00:00.096) 0:02:27.720 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 28 December 2024 11:35:37 -0500 (0:00:00.483) 0:02:28.204 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 28 December 2024 11:35:38 -0500 (0:00:00.785) 0:02:28.989 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 28 December 2024 11:35:38 -0500 (0:00:00.064) 0:02:29.053 ***** changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 28 December 2024 11:35:40 -0500 (0:00:01.314) 0:02:30.368 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 28 December 2024 11:35:40 -0500 (0:00:00.061) 0:02:30.430 ***** changed: [managed-node2] => 
{ "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.027975", "end": "2024-12-28 11:35:40.549208", "rc": 0, "start": "2024-12-28 11:35:40.521233" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 28 December 2024 11:35:40 -0500 (0:00:00.457) 0:02:30.887 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:35:40 -0500 (0:00:00.101) 0:02:30.989 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:35:40 -0500 (0:00:00.040) 0:02:31.029 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:35:40 -0500 (0:00:00.040) 0:02:31.070 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 28 December 2024 11:35:40 -0500 (0:00:00.096) 0:02:31.166 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030961", "end": "2024-12-28 11:35:41.260954", "rc": 0, "start": "2024-12-28 11:35:41.229993" } STDOUT: localhost/podman-pause 5.3.1-1733097600 6c58b1756bb7 5 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 28 December 2024 11:35:41 -0500 (0:00:00.410) 0:02:31.577 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.029007", "end": "2024-12-28 11:35:41.679164", "rc": 0, "start": "2024-12-28 11:35:41.650157" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 28 December 2024 11:35:41 -0500 (0:00:00.444) 0:02:32.021 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.035679", "end": "2024-12-28 11:35:42.184221", "rc": 0, "start": "2024-12-28 11:35:42.148542" } 
STDOUT:

56d61cd416db localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 001056497135-service
d288a7e88646 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp c97b04ddc09a-infra
3b2e8426a107 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2
0a5a42555773 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 61ef7cf78877-service
4c761524f51e localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 00b5192a1328-infra
aa8d45554bfc quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3
38908400944e quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 28 December 2024 11:35:42 -0500 (0:00:00.532) 0:02:32.553 *****
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027635", "end": "2024-12-28 11:35:42.680567", "rc": 0, "start": "2024-12-28 11:35:42.652932" }

STDOUT:

podman
podman-default-kube-network
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 28 December 2024 11:35:42 -0500 (0:00:00.481) 0:02:33.035 *****
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 28 December 2024 11:35:43 -0500 (0:00:00.431) 0:02:33.467 *****
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 28 December 2024 11:35:43 -0500 (0:00:00.456) 0:02:33.923 *****
ok: [managed-node2] => { "ansible_facts": { "services": { "38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service": { "name": "38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service", "source": "systemd", "state": "stopped", "status": "failed" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source":
"systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": 
"display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": 
"initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": 
"systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { 
"name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": 
"systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:35:45 -0500 (0:00:02.108) 0:02:36.032 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:35:45 -0500 (0:00:00.037) 0:02:36.069 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:35:45 -0500 (0:00:00.073) 0:02:36.143 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:35:45 -0500 (0:00:00.098) 0:02:36.242 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.086) 0:02:36.328 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.069) 0:02:36.397 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.092) 0:02:36.490 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.092) 0:02:36.582 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.038) 0:02:36.621 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.071) 0:02:36.693 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.416) 0:02:37.109 ***** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.050) 0:02:37.159 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:35:46 -0500 (0:00:00.053) 0:02:37.213 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.056) 0:02:37.270 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.054) 0:02:37.324 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.085) 0:02:37.410 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.079) 0:02:37.489 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.072) 0:02:37.561 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.065) 0:02:37.627 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], 
"__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.111) 0:02:37.739 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.081) 0:02:37.820 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.054) 0:02:37.874 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.129) 0:02:38.004 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:35:47 -0500 (0:00:00.077) 0:02:38.082 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 28 December 2024 11:35:48 -0500 (0:00:00.305) 0:02:38.388 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 28 December 2024 11:35:48 -0500 (0:00:00.052) 0:02:38.441 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 28 December 2024 11:35:48 -0500 (0:00:00.050) 0:02:38.492 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403671.7692287, 
"attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d681c7d56f912150d041873e880818b22a90c188", "ctime": 1735403643.9791245, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 427819224, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1735403643.7081234, "nlink": 1, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2102, "uid": 0, "version": "669467868", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 28 December 2024 11:35:48 -0500 (0:00:00.440) 0:02:38.932 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 28 December 2024 11:35:48 -0500 (0:00:00.101) 0:02:39.034 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 28 December 2024 11:35:49 -0500 (0:00:00.379) 0:02:39.413 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 28 December 2024 11:35:49 -0500 (0:00:00.043) 0:02:39.456 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 28 December 2024 11:35:49 -0500 (0:00:00.075) 0:02:39.532 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 28 December 2024 11:35:49 -0500 (0:00:00.060) 0:02:39.592 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 28 December 2024 11:35:49 
-0500 (0:00:00.440) 0:02:40.033 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 28 December 2024 11:35:50 -0500 (0:00:00.831) 0:02:40.865 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 28 December 2024 11:35:50 -0500 (0:00:00.119) 0:02:40.984 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 28 December 2024 11:35:50 -0500 (0:00:00.094) 0:02:41.079 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 28 December 2024 11:35:50 -0500 (0:00:00.062) 0:02:41.141 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.030820", "end": "2024-12-28 11:35:51.291361", "rc": 0, "start": "2024-12-28 11:35:51.260541" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 28 December 2024 11:35:51 -0500 (0:00:00.533) 0:02:41.675 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:35:51 -0500 (0:00:00.074) 0:02:41.749 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:35:51 -0500 (0:00:00.050) 0:02:41.800 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:35:51 -0500 (0:00:00.061) 0:02:41.861 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 28 December 2024 11:35:51 -0500 (0:00:00.049) 0:02:41.911 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032324", "end": "2024-12-28 11:35:52.006809", "rc": 0, "start": "2024-12-28 11:35:51.974485" } STDOUT:
localhost/podman-pause 5.3.1-1733097600 6c58b1756bb7 5 minutes ago 701 kB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB
quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 28 December 2024 11:35:52 -0500 (0:00:00.422) 0:02:42.334 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028238", "end": "2024-12-28 11:35:52.425514", "rc": 0, "start": "2024-12-28 11:35:52.397276" } STDOUT:
local systemd-quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 28 December 2024 11:35:52 -0500 (0:00:00.450) 0:02:42.784 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034999", "end": "2024-12-28 11:35:52.931510", "rc": 0, "start": "2024-12-28 11:35:52.896511" } STDOUT:
56d61cd416db localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 001056497135-service
d288a7e88646 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp c97b04ddc09a-infra
3b2e8426a107 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2
0a5a42555773 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 61ef7cf78877-service
4c761524f51e localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp 00b5192a1328-infra
aa8d45554bfc quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3
38908400944e quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 28 December 2024 11:35:53 -0500 (0:00:00.515) 0:02:43.300 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027583", "end": "2024-12-28 11:35:53.421276", "rc": 0, "start": "2024-12-28 11:35:53.393693" } STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 28 December 2024 11:35:53 -0500 (0:00:00.434) 0:02:43.735 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods]
***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 28 December 2024 11:35:53 -0500 (0:00:00.394) 0:02:44.130 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 28 December 2024 11:35:54 -0500 (0:00:00.407) 0:02:44.537 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service": { "name": "38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service", "source": "systemd", "state": "stopped", "status": "failed" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": 
"man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": 
"systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { 
"name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:35:56 -0500 (0:00:02.048) 0:02:46.586 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:35:56 -0500 (0:00:00.108) 0:02:46.695 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:35:56 -0500 (0:00:00.270) 0:02:46.965 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", 
"__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:35:56 -0500 (0:00:00.049) 0:02:47.015 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:35:56 -0500 (0:00:00.086) 0:02:47.101 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:35:56 -0500 (0:00:00.051) 0:02:47.152 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:35:56 -0500 (0:00:00.075) 0:02:47.227 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.079) 0:02:47.307 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.081) 0:02:47.388 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.090) 0:02:47.479 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 
1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.437) 0:02:47.916 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.047) 0:02:47.964 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.055) 0:02:48.019 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.046) 0:02:48.066 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.039) 0:02:48.105 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.031) 0:02:48.136 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.031) 0:02:48.168 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 
11:35:57 -0500 (0:00:00.030) 0:02:48.198 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:35:57 -0500 (0:00:00.031) 0:02:48.230 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:35:58 -0500 (0:00:00.122) 0:02:48.352 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:35:58 -0500 (0:00:00.059) 0:02:48.411 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:35:58 -0500 (0:00:00.042) 0:02:48.454 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:35:58 -0500 (0:00:00.091) 0:02:48.546 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:35:58 -0500 (0:00:00.055) 0:02:48.602 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 28 December 2024 11:35:58 -0500 (0:00:00.124) 0:02:48.726 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] 
************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 28 December 2024 11:35:58 -0500 (0:00:00.056) 0:02:48.782 ***** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:34:00 EST", "ActiveEnterTimestampMonotonic": "581196665", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket quadlet-demo-mysql-volume.service tmp.mount sysinit.target network-online.target quadlet-demo-network.service -.mount system.slice basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-28 11:34:00 EST", "AssertTimestampMonotonic": "580926290", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "2961549000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:34:00 EST", "ConditionTimestampMonotonic": "580926287", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo-mysql.service", "ControlGroupId": "10613", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "38216", "ExecMainStartTimestamp": "Sat 2024-12-28 11:34:00 EST", "ExecMainStartTimestampMonotonic": "581196616", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret 
mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:34:00 EST", "InactiveExitTimestampMonotonic": "580935234", "InvocationID": "bb4df44732074f02890fbe8cec4d5784", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": 
"13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "38216", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2617618432", "MemoryCurrent": "600477696", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "642510848", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice -.mount quadlet-demo-network.service quadlet-demo-mysql-volume.service sysinit.target", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", 
"StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:34:00 EST", "StateChangeTimestampMonotonic": "581196665", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "23", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 28 December 2024 11:36:01 -0500 (0:00:02.869) 0:02:51.652 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403639.3331072, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "ctime": 1735403639.3391073, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 272630203, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1735403639.056106, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 363, "uid": 0, "version": "1605284469", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 28 December 2024 11:36:01 -0500 (0:00:00.456) 0:02:52.108 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 28 December 2024 11:36:01 -0500 (0:00:00.106) 0:02:52.215 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: 
TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 28 December 2024 11:36:02 -0500 (0:00:00.408) 0:02:52.623 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 28 December 2024 11:36:02 -0500 (0:00:00.089) 0:02:52.712 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 28 December 2024 11:36:02 -0500 (0:00:00.061) 0:02:52.774 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 28 December 2024 11:36:02 -0500 (0:00:00.062) 0:02:52.837 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 28 December 2024 11:36:02 -0500 (0:00:00.377) 0:02:53.214 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 28 December 2024 11:36:03 -0500 (0:00:00.859) 0:02:54.074 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 28 December 2024 11:36:04 -0500 (0:00:00.479) 0:02:54.553 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 28 December 2024 11:36:04 -0500 (0:00:00.081) 0:02:54.635 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }
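
Annotation: with the unit stopped and its quadlet file removed, the role refreshes systemd (so the generated unit disappears) and then reclaims disk space. The next task runs podman image prune --all -f, which deletes every image no longer referenced by a container; the single image ID printed below is presumably the mysql image pulled for this test, since the surviving images listed afterwards are all still in use. A minimal equivalent of that task, with hypothetical register/changed_when names rather than the role's actual source:

    - name: Prune images no longer in use (sketch)
      ansible.builtin.command:
        argv: [podman, image, prune, --all, -f]
      register: __prune_result  # hypothetical variable name
      changed_when: __prune_result.stdout | length > 0
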
"0:00:00.235628", "end": "2024-12-28 11:36:05.011078", "rc": 0, "start": "2024-12-28 11:36:04.775450" } STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 28 December 2024 11:36:05 -0500 (0:00:00.654) 0:02:55.351 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:36:05 -0500 (0:00:00.116) 0:02:55.467 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:36:05 -0500 (0:00:00.061) 0:02:55.529 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:36:05 -0500 (0:00:00.059) 0:02:55.589 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 28 December 2024 11:36:05 -0500 (0:00:00.060) 0:02:55.649 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030093", "end": "2024-12-28 11:36:05.770246", "rc": 0, "start": "2024-12-28 11:36:05.740153" } STDOUT: localhost/podman-pause 5.3.1-1733097600 6c58b1756bb7 5 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 28 December 2024 11:36:05 -0500 (0:00:00.454) 0:02:56.104 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027626", "end": "2024-12-28 11:36:06.205709", "rc": 0, "start": "2024-12-28 11:36:06.178083" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 28 December 2024 11:36:06 -0500 (0:00:00.415) 0:02:56.520 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034621", "end": "2024-12-28 11:36:06.608601", "rc": 0, "start": "2024-12-28 11:36:06.573980" } STDOUT: 56d61cd416db localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 
minutes 001056497135-service d288a7e88646 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp c97b04ddc09a-infra 3b2e8426a107 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 0a5a42555773 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 61ef7cf78877-service 4c761524f51e localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 00b5192a1328-infra aa8d45554bfc quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 28 December 2024 11:36:06 -0500 (0:00:00.404) 0:02:56.925 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027714", "end": "2024-12-28 11:36:07.013461", "rc": 0, "start": "2024-12-28 11:36:06.985747" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 28 December 2024 11:36:07 -0500 (0:00:00.403) 0:02:57.328 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 28 December 2024 11:36:07 -0500 (0:00:00.397) 0:02:57.726 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 28 December 2024 11:36:07 -0500 (0:00:00.412) 0:02:58.139 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" 
}, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": 
"systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": 
"systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:36:09 -0500 (0:00:02.064) 0:03:00.203 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.042) 0:03:00.246 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.048) 0:03:00.294 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.049) 0:03:00.343 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.054) 0:03:00.398 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.073) 0:03:00.472 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.077) 0:03:00.549 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.046) 0:03:00.596 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.041) 0:03:00.637 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }
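
Annotation: the user checks that follow are all short-circuited here because the volume quadlet deploys as root (__podman_user is "root" and __podman_rootless is false, per the facts above). For a rootless user the role would instead verify that the account has subordinate UID/GID ranges, preferring the getsubids binary whose stat appears below and falling back to reading /etc/subuid and /etc/subgid only when it is absent (hence the "not __podman_stat_getsubids.stat.exists" conditions). A hedged sketch of that check, with variable names borrowed from the log but the task shape assumed:

    - name: Check with getsubids for user subuids (sketch)
      ansible.builtin.command:
        argv: [getsubids, "{{ __podman_user }}"]
      register: __subuid_info  # hypothetical name
      changed_when: false
      when: __podman_user not in ["root", "0"]
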
TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.050) 0:03:00.687 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.383) 0:03:01.071 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.042) 0:03:01.114 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.034) 0:03:01.148 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.035) 0:03:01.184 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:36:10 -0500 (0:00:00.036) 0:03:01.221 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.042) 0:03:01.263
***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.098) 0:03:01.362 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.036) 0:03:01.398 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.035) 0:03:01.434 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.063) 0:03:01.498 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.035) 0:03:01.538 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.035) 0:03:01.573 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.079) 0:03:01.653 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
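
Annotation: parts 0-5 above show the naming scheme at work for the volume quadlet: the source file quadlet-demo-mysql.volume (whose entire payload is the one-line section header "[Volume]", per part 0) maps to the unit quadlet-demo-mysql-volume.service, while the .container file earlier mapped straight to quadlet-demo-mysql.service and the network quadlet to quadlet-demo-network.service. In other words, for non-container quadlet types the generated service name is the quadlet name plus a type suffix. An illustrative restatement of that rule, not the role's actual code, and ignoring special cases such as kube quadlets:

    - name: Derive the generated unit name (illustrative only)
      ansible.builtin.set_fact:
        __podman_service_name: >-
          {{ __podman_quadlet_name ~ ('.service' if __podman_quadlet_type == 'container'
             else '-' ~ __podman_quadlet_type ~ '.service') }}

Stopping this generated unit is harmless to repeat: as the ExecStart logged below shows, it only runs podman volume create --ignore, which is idempotent.
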
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.044) 0:03:01.698 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.078) 0:03:01.776 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 28 December 2024 11:36:11 -0500 (0:00:00.035) 0:03:01.812 ***** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:33:48 EST", "ActiveEnterTimestampMonotonic": "568940801", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket network-online.target -.mount basic.target sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-28 11:33:48 EST", "AssertTimestampMonotonic": "568889920", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:33:48 EST", "ConditionTimestampMonotonic": "568889916", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2024-12-28 11:33:48 EST", 
"ExecMainExitTimestampMonotonic": "568940591", "ExecMainHandoffTimestamp": "Sat 2024-12-28 11:33:48 EST", "ExecMainHandoffTimestampMonotonic": "568904579", "ExecMainPID": "36887", "ExecMainStartTimestamp": "Sat 2024-12-28 11:33:48 EST", "ExecMainStartTimestampMonotonic": "568890801", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:33:48 EST", "InactiveExitTimestampMonotonic": "568891292", "InvocationID": "b2b1dbd9fc71489198186f3fdc5b2c48", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3144278016", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": 
"200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:33:48 EST", "StateChangeTimestampMonotonic": "568940801", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", 
"TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 28 December 2024 11:36:12 -0500 (0:00:00.814) 0:03:02.627 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403627.2190623, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "ctime": 1735403627.2250624, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 662700309, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1735403626.9570613, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 9, "uid": 0, "version": "2945188429", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 28 December 2024 11:36:12 -0500 (0:00:00.404) 0:03:03.031 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 28 December 2024 11:36:12 -0500 (0:00:00.074) 0:03:03.106 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 28 December 2024 11:36:13 -0500 (0:00:00.367) 0:03:03.473 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 28 December 2024 11:36:13 -0500 (0:00:00.125) 0:03:03.599 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 28 December 2024 11:36:13 -0500 (0:00:00.037) 
0:03:03.636 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 28 December 2024 11:36:13 -0500 (0:00:00.039) 0:03:03.676 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 28 December 2024 11:36:13 -0500 (0:00:00.374) 0:03:04.050 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 28 December 2024 11:36:14 -0500 (0:00:00.748) 0:03:04.799 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.448) 0:03:05.247 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.057) 0:03:05.305 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.036) 0:03:05.341 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028727", "end": "2024-12-28 11:36:15.435449", "rc": 0, "start": "2024-12-28 11:36:15.406722" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.409) 0:03:05.750 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.066) 0:03:05.816 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.036) 0:03:05.853 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.042) 0:03:05.895 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 28 December 2024 11:36:15 -0500 (0:00:00.037) 0:03:05.932 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.029867", "end": "2024-12-28 11:36:16.019307", "rc": 0, "start": "2024-12-28 11:36:15.989440" }
STDOUT:
localhost/podman-pause 5.3.1-1733097600 6c58b1756bb7 6 minutes ago 701 kB
quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 28 December 2024 11:36:16 -0500 (0:00:00.407) 0:03:06.339 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027308", "end": "2024-12-28 11:36:16.420985", "rc": 0, "start": "2024-12-28 11:36:16.393677" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 28 December 2024 11:36:16 -0500 (0:00:00.413) 0:03:06.753 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033560", "end": "2024-12-28 11:36:16.873268", "rc": 0, "start": "2024-12-28 11:36:16.839708" }
STDOUT:
56d61cd416db localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 001056497135-service
d288a7e88646 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp c97b04ddc09a-infra
3b2e8426a107 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2
0a5a42555773 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 61ef7cf78877-service
4c761524f51e localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 00b5192a1328-infra
aa8d45554bfc quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 28 December 2024 11:36:16 -0500 (0:00:00.440) 0:03:07.193 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.028792", "end": "2024-12-28 11:36:17.280019", "rc": 0, "start": "2024-12-28 11:36:17.251227" }
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK
[fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 28 December 2024 11:36:17 -0500 (0:00:00.500) 0:03:07.693 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 28 December 2024 11:36:17 -0500 (0:00:00.458) 0:03:08.151 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 28 December 2024 11:36:18 -0500 (0:00:00.435) 0:03:08.587 ***** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", 
"state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:36:20 -0500 (0:00:01.941) 0:03:10.529 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.036) 0:03:10.565 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.071) 0:03:10.637 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.060) 0:03:10.698 ***** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.043) 0:03:10.741 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.066) 0:03:10.807 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.066) 0:03:10.874 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.047) 0:03:10.921 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.050) 0:03:10.972 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 28 December 2024 11:36:20 -0500 (0:00:00.055) 0:03:11.028 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403385.845081, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1735403362.2548923, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3230249097", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.402) 0:03:11.430 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.039) 0:03:11.470 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.105) 0:03:11.575 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.035) 0:03:11.611 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.034) 0:03:11.646 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.036) 0:03:11.682 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.037) 0:03:11.719 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.035) 0:03:11.754 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.036) 0:03:11.790 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.064) 0:03:11.854 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.049) 0:03:11.904 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.059) 0:03:11.963 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.132) 0:03:12.095 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 28 December 2024 11:36:21 -0500 (0:00:00.057) 0:03:12.153 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 28 December 2024 11:36:22 -0500 (0:00:00.101) 0:03:12.255 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 28 December 2024 11:36:22 -0500 (0:00:00.037) 0:03:12.292 ***** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-network.service", "state": "stopped", "status": { "AccessSELinuxContext": 
"system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-12-28 11:33:43 EST", "ActiveEnterTimestampMonotonic": "563567684", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "system.slice systemd-journald.socket sysinit.target -.mount network-online.target basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-12-28 11:33:43 EST", "AssertTimestampMonotonic": "563525571", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-12-28 11:33:43 EST", "ConditionTimestampMonotonic": "563525567", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2024-12-28 11:33:43 EST", "ExecMainExitTimestampMonotonic": "563567464", "ExecMainHandoffTimestamp": "Sat 2024-12-28 11:33:43 EST", "ExecMainHandoffTimestampMonotonic": "563535622", "ExecMainPID": "36058", "ExecMainStartTimestamp": "Sat 2024-12-28 11:33:43 EST", "ExecMainStartTimestampMonotonic": "563526479", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", 
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-12-28 11:33:43 EST", "InactiveExitTimestampMonotonic": "563526931", "InvocationID": "8d334ce42e784914a3995e4ec72f68e3", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3146190848", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2024-12-28 11:33:43 EST", "StateChangeTimestampMonotonic": "563567684", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 28 December 2024 11:36:22 -0500 (0:00:00.827) 0:03:13.120 ***** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1735403621.8350422, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "ctime": 1735403621.8410423, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 616562899, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": 
false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1735403621.4180408, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 74, "uid": 0, "version": "3589024936", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 28 December 2024 11:36:23 -0500 (0:00:00.476) 0:03:13.596 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 28 December 2024 11:36:23 -0500 (0:00:00.078) 0:03:13.675 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 28 December 2024 11:36:23 -0500 (0:00:00.371) 0:03:14.046 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 28 December 2024 11:36:23 -0500 (0:00:00.067) 0:03:14.114 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 28 December 2024 11:36:23 -0500 (0:00:00.040) 0:03:14.155 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 28 December 2024 11:36:23 -0500 (0:00:00.037) 0:03:14.192 ***** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 28 December 2024 11:36:24 -0500 (0:00:00.378) 0:03:14.571 ***** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 28 December 2024 11:36:25 -0500 (0:00:00.768) 0:03:15.339 ***** changed: 
TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 28 December 2024 11:36:25 -0500 (0:00:00.768) 0:03:15.339 ***** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 28 December 2024 11:36:25 -0500 (0:00:00.422) 0:03:15.762 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 28 December 2024 11:36:25 -0500 (0:00:00.049) 0:03:15.812 ***** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 28 December 2024 11:36:25 -0500 (0:00:00.037) 0:03:15.849 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028081", "end": "2024-12-28 11:36:25.932636", "rc": 0, "start": "2024-12-28 11:36:25.904555" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 28 December 2024 11:36:26 -0500 (0:00:00.399) 0:03:16.248 ***** included: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 28 December 2024 11:36:26 -0500 (0:00:00.069) 0:03:16.318 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 28 December 2024 11:36:26 -0500 (0:00:00.036) 0:03:16.355 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 28 December 2024 11:36:26 -0500 (0:00:00.083) 0:03:16.438 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 28 December 2024 11:36:26 -0500 (0:00:00.041) 0:03:16.479 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.029835", "end": "2024-12-28 11:36:26.563414",
"rc": 0, "start": "2024-12-28 11:36:26.533579" } STDOUT: localhost/podman-pause 5.3.1-1733097600 6c58b1756bb7 6 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 28 December 2024 11:36:26 -0500 (0:00:00.399) 0:03:16.879 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.026127", "end": "2024-12-28 11:36:26.960903", "rc": 0, "start": "2024-12-28 11:36:26.934776" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 28 December 2024 11:36:27 -0500 (0:00:00.420) 0:03:17.299 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.032667", "end": "2024-12-28 11:36:27.425763", "rc": 0, "start": "2024-12-28 11:36:27.393096" } STDOUT: 56d61cd416db localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 001056497135-service d288a7e88646 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp c97b04ddc09a-infra 3b2e8426a107 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 0a5a42555773 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 61ef7cf78877-service 4c761524f51e localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp 00b5192a1328-infra aa8d45554bfc quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 28 December 2024 11:36:27 -0500 (0:00:00.462) 0:03:17.762 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027729", "end": "2024-12-28 11:36:27.866108", "rc": 0, "start": "2024-12-28 11:36:27.838379" } STDOUT: podman podman-default-kube-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 28 December 2024 11:36:27 -0500 (0:00:00.432) 0:03:18.195 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 28 December 2024 11:36:28 -0500 (0:00:00.485) 0:03:18.680 ***** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 28 December 2024 11:36:28 -0500 (0:00:00.527) 0:03:19.207 ***** ok: [managed-node2] => { "ansible_facts": { 
"services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": 
"dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": 
"grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", 
"source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": 
"running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": 
{ "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": 
"systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": 
"user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 28 December 2024 11:36:31 -0500 (0:00:02.201) 0:03:21.409 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 28 December 2024 11:36:31 -0500 (0:00:00.106) 0:03:21.517 ***** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 28 December 2024 11:36:31 -0500 (0:00:00.105) 0:03:21.622 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 28 December 2024 11:36:31 -0500 (0:00:00.061) 0:03:21.683 ***** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188 Saturday 28 December 2024 11:36:31 -0500 (0:00:00.076) 0:03:21.760 ***** fatal: [managed-node2]: FAILED! 
=> { "assertion": "__podman_test_debug_images.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Debug] ******************************************************************* task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Saturday 28 December 2024 11:36:31 -0500 (0:00:00.048) 0:03:21.809 ***** ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.378205", "end": "2024-12-28 11:36:32.244653", "rc": 0, "start": "2024-12-28 11:36:31.866448" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet + : + systemctl list-unit-files --all + grep quadlet + : + grep quadlet + systemctl list-units --plain --failed -l --all + : TASK [Get journald] ************************************************************ task path: /tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Saturday 28 December 2024 11:36:32 -0500 (0:00:00.755) 0:03:22.565 ***** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030772", "end": "2024-12-28 11:36:32.649536", "failed_when_result": true, "rc": 0, "start": "2024-12-28 11:36:32.618764" } STDOUT: Dec 28 11:32:00 managed-node2 python3.12[23098]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Dec 28 11:32:00 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 1983. Dec 28 11:32:00 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1905. Dec 28 11:32:00 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1905. Dec 28 11:32:00 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 1985. Dec 28 11:32:00 managed-node2 systemd-logind[656]: New session 6 of user podman_basic_user. 
░░ Subject: A new session 6 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 23102. Dec 28 11:32:00 managed-node2 (systemd)[23102]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Dec 28 11:32:01 managed-node2 systemd[23102]: Queued start job for default target default.target. Dec 28 11:32:01 managed-node2 systemd[23102]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Dec 28 11:32:01 managed-node2 systemd[23102]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Dec 28 11:32:01 managed-node2 systemd[23102]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Dec 28 11:32:01 managed-node2 systemd[23102]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Dec 28 11:32:01 managed-node2 systemd[23102]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 5. Dec 28 11:32:01 managed-node2 systemd[23102]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Dec 28 11:32:01 managed-node2 systemd[23102]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. 
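The entries above show the rootless groundwork: `loginctl enable-linger podman_basic_user` creates /var/lib/systemd/linger/podman_basic_user, so systemd brings up a persistent user manager (user-runtime-dir@3001.service, then user@3001.service) without an interactive login. As a standalone task that step looks roughly like the following sketch, reusing only the command and creates-guard visible in the journal (the host pattern is a placeholder):

---
- hosts: all
  become: true
  tasks:
    # Lingering keeps user@3001.service alive after logout; the creates:
    # guard makes the task idempotent, matching the logged invocation.
    - name: Enable lingering for the rootless podman user
      ansible.builtin.command:
        cmd: loginctl enable-linger podman_basic_user
        creates: /var/lib/systemd/linger/podman_basic_user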
Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Dec 28 11:32:01 managed-node2 systemd[23102]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Dec 28 11:32:01 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1985. Dec 28 11:32:01 managed-node2 systemd[23102]: Startup finished in 75ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 75683 microseconds. Dec 28 11:32:01 managed-node2 python3.12[23248]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:02 managed-node2 python3.12[23379]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:02 managed-node2 sudo[23552]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rgdhlmjoemtzzjbyxxchxyctylwqhfnq ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403522.317728-14463-56258380011442/AnsiballZ_podman_image.py' Dec 28 11:32:02 managed-node2 sudo[23552]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23552) opened. Dec 28 11:32:02 managed-node2 sudo[23552]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:02 managed-node2 systemd[23102]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. 
░░ ░░ The job identifier is 14. Dec 28 11:32:02 managed-node2 systemd[23102]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Dec 28 11:32:02 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Dec 28 11:32:02 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Dec 28 11:32:02 managed-node2 systemd[23102]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Dec 28 11:32:02 managed-node2 dbus-broker-launch[23576]: Ready Dec 28 11:32:02 managed-node2 systemd[23102]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Dec 28 11:32:02 managed-node2 systemd[23102]: Started podman-23563.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Dec 28 11:32:02 managed-node2 systemd[23102]: Started podman-pause-c9301dca.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Dec 28 11:32:02 managed-node2 systemd[23102]: Started podman-23578.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Dec 28 11:32:04 managed-node2 systemd[23102]: Started podman-23604.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. 
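Every rootless podman invocation lands in a transient scope (podman-23563.scope, podman-pause-c9301dca.scope, ...) under the user manager rather than under the system instance. The sudo lines show how the test reaches that instance: become the user and export XDG_RUNTIME_DIR=/run/user/3001. The same pattern works for ad-hoc inspection; a hedged sketch:

---
- hosts: all
  become: true
  become_user: podman_basic_user
  tasks:
    # XDG_RUNTIME_DIR must point at the user's runtime directory so that
    # systemctl talks to user@3001.service, as the sudo wrappers above do.
    - name: List transient scopes under the rootless user's systemd instance
      ansible.builtin.command:
        cmd: systemctl --user list-units --type scope
      environment:
        XDG_RUNTIME_DIR: /run/user/3001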
Dec 28 11:32:04 managed-node2 sudo[23552]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:05 managed-node2 python3.12[23742]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:05 managed-node2 python3.12[23873]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:06 managed-node2 python3.12[24004]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:32:06 managed-node2 python3.12[24109]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403525.7853103-14616-209831906785104/.source.yml _original_basename=.h8subroh follow=False checksum=16d1b24f2942ab6f5b4341d181237c71d76d7322 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:06 managed-node2 sudo[24282]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ogbxjrmnbrgzzxjfhubyswxitrvrggtx ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403526.5291655-14658-207237353690412/AnsiballZ_podman_play.py' Dec 28 11:32:06 managed-node2 sudo[24282]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24282) opened. Dec 28 11:32:06 managed-node2 sudo[24282]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:06 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 28 11:32:07 managed-node2 systemd[23102]: Started podman-24292.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Dec 28 11:32:07 managed-node2 systemd[23102]: Created slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice - cgroup user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Dec 28 11:32:07 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Dec 28 11:32:07 managed-node2 systemd[23102]: Started rootless-netns-405bf8df.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Dec 28 11:32:07 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:07 managed-node2 kernel: veth0: entered allmulticast mode Dec 28 11:32:07 managed-node2 kernel: veth0: entered promiscuous mode Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:07 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 28 11:32:07 managed-node2 systemd[23102]: Started run-rf8a740498c1244b69bf9eca2f69146fa.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Dec 28 11:32:07 managed-node2 aardvark-dns[24376]: starting aardvark on a child with pid 24377 Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Successfully parsed config Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Listen v6 ip {} Dec 28 11:32:07 managed-node2 aardvark-dns[24377]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Dec 28 11:32:07 managed-node2 conmon[24392]: conmon ee4d55c4ad439a2f244f : failed to write to /proc/self/oom_score_adj: Permission denied Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-conmon-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : terminal_ctrl_fd: 14 Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : winsz read side: 17, winsz write side: 18 Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. 
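At this point the rootless network is up: a podman1 bridge with veth0 attached, and aardvark-dns serving podman-default-kube-network on 10.89.0.1 with the upstream resolvers it parsed from its config. To confirm the same state out of band, the network can be inspected as the same user (name and runtime dir taken from the log):

---
- hosts: all
  become: true
  become_user: podman_basic_user
  tasks:
    - name: Inspect the kube network that aardvark-dns is serving
      ansible.builtin.command:
        cmd: podman network inspect podman-default-kube-network
      environment:
        XDG_RUNTIME_DIR: /run/user/3001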
Dec 28 11:32:07 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : container PID: 24395 Dec 28 11:32:07 managed-node2 conmon[24397]: conmon 03382f1a259308bdd4d4 : failed to write to /proc/self/oom_score_adj: Permission denied Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 61. Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : terminal_ctrl_fd: 13 Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : winsz read side: 16, winsz write side: 17 Dec 28 11:32:07 managed-node2 systemd[23102]: Started libpod-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 66. Dec 28 11:32:07 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : container PID: 24400 Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d Container: 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-12-28T11:32:06-05:00" level=info msg="/bin/podman filtering at log level debug" time="2024-12-28T11:32:06-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-12-28T11:32:06-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-12-28T11:32:06-05:00" level=info msg="Using sqlite as database backend" time="2024-12-28T11:32:06-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
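The podman_play module resolved to `/bin/podman play kube --start=true --log-level=debug .../httpd1.yml` and reported the resulting pod and container IDs on stdout. Stripped of the test plumbing (debug logging, become wrappers), an equivalent standalone task would look roughly like this, using only parameters visible in the logged invocation:

---
- hosts: all
  become: true
  become_user: podman_basic_user
  tasks:
    # state: started maps to `podman play kube --start=true`.
    - name: Play the httpd1 kube YAML as the rootless user
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
      environment:
        XDG_RUNTIME_DIR: /run/user/3001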
time="2024-12-28T11:32:06-05:00" level=debug msg="Using graph driver overlay" time="2024-12-28T11:32:06-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2024-12-28T11:32:06-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2024-12-28T11:32:06-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2024-12-28T11:32:06-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2024-12-28T11:32:06-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2024-12-28T11:32:06-05:00" level=debug msg="Using transient store: false" time="2024-12-28T11:32:06-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2024-12-28T11:32:06-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2024-12-28T11:32:06-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2024-12-28T11:32:06-05:00" level=debug msg="Initializing event backend file" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-28T11:32:06-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-28T11:32:06-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-28T11:32:07-05:00" level=debug msg="Successfully loaded 1 networks" time="2024-12-28T11:32:07-05:00" level=debug msg="found free device name podman1" time="2024-12-28T11:32:07-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" 
time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="FROM \"scratch\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2024-12-28T11:32:07-05:00" level=debug msg="Check for idmapped mounts support " time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c76,c206\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container ID: 0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f" time="2024-12-28T11:32:07-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2024-12-28T11:32:07-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2024-12-28T11:32:07-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2024-12-28T11:32:07-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT 
[\"/catatonit\", \"-P\"]}" time="2024-12-28T11:32:07-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-28T11:32:07-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-28T11:32:07-05:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2024-12-28T11:32:07-05:00" level=debug msg="layer list: [\"361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa\"]" time="2024-12-28T11:32:07-05:00" level=debug msg="using \"/var/tmp/buildah1759970581\" to hold temporary data" time="2024-12-28T11:32:07-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa/diff" time="2024-12-28T11:32:07-05:00" level=debug msg="layer \"361431f1ed08cdfc585930dbaa2760a965361a9e561ade0ae2cef3e4dac61ffa\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2024-12-28T11:32:07-05:00" level=debug msg="OCIv1 config = {\"created\":\"2024-12-28T16:32:07.181184277Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-28T16:32:07.152309955Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-28T16:32:07.184345282Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-28T11:32:07-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2024-12-28T11:32:07-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2024-12-28T16:32:07.181184277Z\",\"container\":\"0d3bc7ba555d31c991e87ce78e7ce6c5bbd8670f62fd63dd12c1d87b49814b3f\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2024-12-28T16:32:07.152309955Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-12-28T16:32:07.184345282Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-12-28T11:32:07-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:0d3ba54be8a6a485e97578cafe10afb2547d3ae0763800b2e1e4bf045053253c\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2024-12-28T11:32:07-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2024-12-28T11:32:07-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2024-12-28T11:32:07-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2024-12-28T11:32:07-05:00" level=debug msg=" Requirement 0: allowed" time="2024-12-28T11:32:07-05:00" level=debug msg="Overall: allowed" time="2024-12-28T11:32:07-05:00" level=debug msg="start reading config" time="2024-12-28T11:32:07-05:00" level=debug msg="finished reading config" time="2024-12-28T11:32:07-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2024-12-28T11:32:07-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2024-12-28T11:32:07-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2024-12-28T11:32:07-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-28T11:32:07-05:00" level=debug msg="No compression detected" time="2024-12-28T11:32:07-05:00" level=debug msg="Using original blob without modification" time="2024-12-28T11:32:07-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2024-12-28T11:32:07-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2024-12-28T11:32:07-05:00" level=debug msg="No compression detected" time="2024-12-28T11:32:07-05:00" level=debug msg="Compression change for blob sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2024-12-28T11:32:07-05:00" level=debug msg="Using original blob without modification" time="2024-12-28T11:32:07-05:00" level=debug msg="setting image creation date to 2024-12-28 16:32:07.181184277 +0000 UTC" time="2024-12-28T11:32:07-05:00" level=debug msg="created new image ID \"428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\" with metadata \"{}\"" time="2024-12-28T11:32:07-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2024-12-28T11:32:07-05:00" level=debug msg="printing final image id \"428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice for parent user.slice and name libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
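This build trace is podman synthesizing its local pause image: FROM scratch, COPY /usr/libexec/podman/catatonit, set the ENTRYPOINT, then COMMIT as localhost/podman-pause:5.3.1-1733097600, which the pod's infra container reuses. Whether that image exists in the user's storage can be checked with podman_image_info; the tag below is the one printed in the log:

---
- hosts: all
  become: true
  become_user: podman_basic_user
  tasks:
    - name: Inspect the locally committed pause image
      containers.podman.podman_image_info:
        name: localhost/podman-pause:5.3.1-1733097600
      environment:
        XDG_RUNTIME_DIR: /run/user/3001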
time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980" time="2024-12-28T11:32:07-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:07-05:00" level=debug msg="setting container name 7a1a570a1a21-infra" time="2024-12-28T11:32:07-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network f77e9465d7924639ca311fa0051f20b3a5be9d142b3a20f85bf2c970144501c8 bridge podman1 2024-12-28 11:32:07.00472767 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-28T11:32:07-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-28T11:32:07-05:00" level=debug msg="Allocated lock 1 for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:428ac09400e79967b8fc99b17ae35fbdf1a637b3886c29eadb823d54e055a980\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\" has run directory \"/run/user/3001/containers/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:07-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:07-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:07-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:07-05:00" level=debug msg="adding container to pod httpd1" time="2024-12-28T11:32:07-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2024-12-28T11:32:07-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:07-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /proc" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /dev" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /sys" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-28T11:32:07-05:00" level=debug msg="Allocated lock 2 for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\" has run directory \"/run/user/3001/containers/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Strongconnecting node ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="Pushed ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b onto stack" time="2024-12-28T11:32:07-05:00" level=debug msg="Finishing node ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b. Popped ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b off stack" time="2024-12-28T11:32:07-05:00" level=debug msg="Strongconnecting node 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="Pushed 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 onto stack" time="2024-12-28T11:32:07-05:00" level=debug msg="Finishing node 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2. 
Popped 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 off stack" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/G7M3WBATRW5IXUOIAED7NGBVIG,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c860,c1021\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-e02ac7b3-b1af-7c3d-8942-5b7b98bce497 for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="Mounted container \"ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/merged\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created root filesystem for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b at /home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/merged" time="2024-12-28T11:32:07-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2024-12-28T11:32:07-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2024-12-28T11:32:07-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_f77e9465_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "a2:dd:67:c6:99:cc", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"Starting parent driver\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport911175231/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport911175231/.bp.sock]\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport: time=\"2024-12-28T11:32:07-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2024-12-28T11:32:07-05:00" level=debug msg="rootlessport is ready" time="2024-12-28T11:32:07-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-28T11:32:07-05:00" level=debug msg="Setting Cgroups for container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b to user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice:libpod:ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:07-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/e16fd0b0e1772b4edc3ed7bf147feb483b8414b0e0edf5be9b08a9cf0c14cd25/merged\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created OCI spec for container 
ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/config.json" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice for parent user.slice and name libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:07-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b -u ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata -p /run/user/3001/containers/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/pidfile -n 7a1a570a1a21-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b]" time="2024-12-28T11:32:07-05:00" level=info msg="Running conmon under slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice and unitName libpod-conmon-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-28T11:32:07-05:00" 
level=debug msg="Received: 24395" time="2024-12-28T11:32:07-05:00" level=info msg="Got Conmon PID as 24393" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b in OCI runtime" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-28T11:32:07-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-28T11:32:07-05:00" level=debug msg="Starting container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b with command [/catatonit -P]" time="2024-12-28T11:32:07-05:00" level=debug msg="Started container ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b" time="2024-12-28T11:32:07-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/JLDUA4NZGI76ZRR5HOENKUUFPL,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c860,c1021\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Mounted container \"03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/merged\"" time="2024-12-28T11:32:07-05:00" level=debug msg="Created root filesystem for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 at /home/podman_basic_user/.local/share/containers/storage/overlay/b8dd3fe1cea54d982422f8df2088f813356c2938779d247908531112cfbc0cc4/merged" time="2024-12-28T11:32:07-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-28T11:32:07-05:00" level=debug msg="Setting Cgroups for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 to user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice:libpod:03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:07-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-28T11:32:07-05:00" level=debug msg="Created OCI spec for container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/config.json" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice for parent user.slice and name libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d" time="2024-12-28T11:32:07-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice" time="2024-12-28T11:32:07-05:00" level=debug 
msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:07-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 -u 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata -p /run/user/3001/containers/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2]" time="2024-12-28T11:32:07-05:00" level=info msg="Running conmon under slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice and unitName libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-12-28T11:32:07-05:00" level=debug msg="Received: 24400" time="2024-12-28T11:32:07-05:00" level=info msg="Got Conmon PID as 24398" time="2024-12-28T11:32:07-05:00" level=debug msg="Created container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 in OCI runtime" time="2024-12-28T11:32:07-05:00" level=debug msg="Starting container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2 with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-28T11:32:07-05:00" level=debug msg="Started container 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2" time="2024-12-28T11:32:07-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-12-28T11:32:07-05:00" level=debug msg="Shutting down engines" time="2024-12-28T11:32:07-05:00" level=info msg="Received shutdown.Stop(), 
terminating!" PID=24292 Dec 28 11:32:07 managed-node2 python3.12[24285]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 28 11:32:07 managed-node2 sudo[24282]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:08 managed-node2 sudo[24574]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nbvwsesvdxwwxbvavqduinwrpbjqzjgm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403527.9234824-14730-128175966223340/AnsiballZ_systemd.py' Dec 28 11:32:08 managed-node2 sudo[24574]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24574) opened. Dec 28 11:32:08 managed-node2 sudo[24574]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:08 managed-node2 python3.12[24577]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:32:08 managed-node2 systemd[23102]: Reload requested from client PID 24578 ('systemctl')... Dec 28 11:32:08 managed-node2 systemd[23102]: Reloading... Dec 28 11:32:08 managed-node2 systemd[23102]: Reloading finished in 46 ms. Dec 28 11:32:08 managed-node2 sudo[24574]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:08 managed-node2 sudo[24760]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rsufcjaytzfuvsdvktjmvixagihcdkaj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403528.7678657-14768-215863873770949/AnsiballZ_systemd.py' Dec 28 11:32:08 managed-node2 sudo[24760]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24760) opened. Dec 28 11:32:09 managed-node2 sudo[24760]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:09 managed-node2 python3.12[24763]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 28 11:32:09 managed-node2 systemd[23102]: Reload requested from client PID 24766 ('systemctl')... Dec 28 11:32:09 managed-node2 systemd[23102]: Reloading... Dec 28 11:32:09 managed-node2 systemd[23102]: Reloading finished in 45 ms. Dec 28 11:32:09 managed-node2 sudo[24760]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:09 managed-node2 sudo[24948]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wjayerfcwttzlpplhdotmgqxfpzwtmsn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403529.5767055-14819-31138203494993/AnsiballZ_systemd.py' Dec 28 11:32:09 managed-node2 sudo[24948]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24948) opened. 
Dec 28 11:32:09 managed-node2 sudo[24948]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:32:10 managed-node2 python3.12[24951]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:32:10 managed-node2 systemd[23102]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 72. Dec 28 11:32:10 managed-node2 systemd[23102]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Dec 28 11:32:10 managed-node2 conmon[24393]: conmon ee4d55c4ad439a2f244f : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice/libpod-ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b.scope/container/memory.events Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Received SIGHUP Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Successfully parsed config Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Listen v4 ip {} Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: Listen v6 ip {} Dec 28 11:32:10 managed-node2 aardvark-dns[24377]: No configuration found stopping the sever Dec 28 11:32:10 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:10 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Dec 28 11:32:10 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Dec 28 11:32:10 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b)" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=info msg="Using sqlite as database backend" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
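
The cleanup.PersistentPreRunE(...) call at the end of the previous entry is conmon's --exit-command firing: ee4d55c4ad43... is the pod's infra container (the one started with /catatonit -P earlier), and when it exited conmon re-invoked podman with the same global flags to tidy up. Reconstructed from the --exit-command-arg list logged earlier, that call is essentially:

    $ podman --root /home/podman_basic_user/.local/share/containers/storage \
          --runroot /run/user/3001/containers \
          container cleanup --stopped-only \
          ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b
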
Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using graph driver overlay" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using transient store: false" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Cached value indicated that native-diff is usable" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Initializing event backend file" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid 
argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=info msg="Setting parallel job count to 7" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only ee4d55c4ad439a2f244f39c321b1889b19552f067ddcda8e2e8560dbc79cce8b)" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=debug msg="Shutting down engines" Dec 28 11:32:10 managed-node2 /usr/bin/podman[24964]: time="2024-12-28T11:32:10-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24964 Dec 28 11:32:20 managed-node2 podman[24954]: time="2024-12-28T11:32:20-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Dec 28 11:32:20 managed-node2 conmon[24398]: conmon 03382f1a259308bdd4d4 : container 24400 exited with status 137 Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2)" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=info msg="Using sqlite as database backend" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using graph driver overlay" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using run root /run/user/3001/containers" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Dec 28 11:32:20 managed-node2 /usr/bin/podman[24983]: time="2024-12-28T11:32:20-05:00" level=debug msg="Using transient store: false" Dec 28 11:32:20 managed-node2 systemd[23102]: Stopping libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 85. Dec 28 11:32:20 managed-node2 systemd[23102]: Stopped libpod-conmon-03382f1a259308bdd4d4c3ac85c2fcf045b1efc0e5f3106247edff3f851861e2.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Dec 28 11:32:20 managed-node2 systemd[23102]: Removed slice user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice - cgroup user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Dec 28 11:32:20 managed-node2 systemd[23102]: user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d.slice: No such file or directory Dec 28 11:32:20 managed-node2 podman[24954]: Pods stopped: Dec 28 11:32:20 managed-node2 podman[24954]: 7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d Dec 28 11:32:20 managed-node2 podman[24954]: Pods removed: Dec 28 11:32:20 managed-node2 podman[24954]: 7a1a570a1a215054f3019c88b88b4933764960e040bc601fa180f4b89d36a26d Dec 28 11:32:20 managed-node2 podman[24954]: Secrets removed: Dec 28 11:32:20 managed-node2 podman[24954]: Volumes removed: Dec 28 11:32:20 managed-node2 systemd[23102]: Created slice user-libpod_pod_3b4388a2a9b762501c124a3661654120f9263a80f5f11456345e85d379c6d7fe.slice - cgroup user-libpod_pod_3b4388a2a9b762501c124a3661654120f9263a80f5f11456345e85d379c6d7fe.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. 
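
The SIGKILL warning above is likewise expected with this image: the busybox-extras httpd in httpd1-httpd1 evidently ignores SIGTERM, so after podman's default 10-second grace period the container is killed and conmon records exit status 137 (128 + 9, i.e. SIGKILL). The grace period is adjustable, for example:

    $ podman stop --time 30 httpd1-httpd1                               # per stop
    $ podman run --stop-timeout 30 quay.io/libpod/testimage:20210610   # baked into the container
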
Dec 28 11:32:20 managed-node2 systemd[23102]: Started libpod-1a70bb1a83df1295fd28d2153b789097b0f0ccfdacba4b75f6617110edd97254.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Dec 28 11:32:20 managed-node2 systemd[23102]: Started rootless-netns-15dce554.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:20 managed-node2 kernel: veth0: entered allmulticast mode Dec 28 11:32:20 managed-node2 kernel: veth0: entered promiscuous mode Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:20 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 28 11:32:20 managed-node2 systemd[23102]: Started run-r880a3ae287c74593b989382815608bfc.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Dec 28 11:32:20 managed-node2 systemd[23102]: Started libpod-1dca1fd830799e789626a48dcc08cc0c920b9abb4783f3320f176c42769859a3.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Dec 28 11:32:20 managed-node2 systemd[23102]: Started libpod-d9139396f2177088f4a474a9b9f8bf7a8a991165bfa36c5a8571244a1a47883f.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Dec 28 11:32:20 managed-node2 systemd[23102]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
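
Everything the restarted service creates lands under the user manager as transient units: a user-libpod_pod_<id>.slice per pod, a libpod-<ctr-id>.scope per container, plus scopes for the rootless netns and aardvark-dns. While the pod is up they can be inspected like any other unit, e.g.:

    $ systemctl --user status \
          user-libpod_pod_3b4388a2a9b762501c124a3661654120f9263a80f5f11456345e85d379c6d7fe.slice
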
Dec 28 11:32:20 managed-node2 podman[24954]: Pod: Dec 28 11:32:20 managed-node2 podman[24954]: 3b4388a2a9b762501c124a3661654120f9263a80f5f11456345e85d379c6d7fe Dec 28 11:32:20 managed-node2 podman[24954]: Container: Dec 28 11:32:20 managed-node2 podman[24954]: d9139396f2177088f4a474a9b9f8bf7a8a991165bfa36c5a8571244a1a47883f Dec 28 11:32:20 managed-node2 sudo[24948]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:32:21 managed-node2 python3.12[25171]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 28 11:32:22 managed-node2 python3.12[25303]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:23 managed-node2 python3.12[25436]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:32:24 managed-node2 python3.12[25568]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:25 managed-node2 python3.12[25699]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:25 managed-node2 systemd[4481]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Dec 28 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:25 managed-node2 systemd[4481]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Dec 28 11:32:25 managed-node2 systemd[4481]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Dec 28 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:26 managed-node2 podman[25862]: 2024-12-28 11:32:26.37897644 -0500 EST m=+0.583326334 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:26 managed-node2 python3.12[26008]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:27 managed-node2 python3.12[26139]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:27 managed-node2 python3.12[26270]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:32:28 managed-node2 python3.12[26375]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403547.4831927-15664-184790741417584/.source.yml _original_basename=.z3l5zz8s follow=False checksum=b7bd0e32af83ded16af592ddf05292719e54426e backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 28 11:32:28 managed-node2 systemd[1]: Created slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice - cgroup machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice. ░░ Subject: A start job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished successfully. ░░ ░░ The job identifier is 2069. 
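
The systemd-escape run at 11:32:23 above is how the role maps a kube YAML path onto a podman-kube@ instance name. Executed by hand it should print the unit name used for httpd2 ('/' becomes '-', a literal '-' becomes '\x2d'):

    $ systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
    podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service
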
Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.632800229 -0500 EST m=+0.069098346 container create 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.637189725 -0500 EST m=+0.073487908 pod create 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.663977731 -0500 EST m=+0.100275777 container create 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.639839913 -0500 EST m=+0.076138036 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:28 managed-node2 kernel: veth0: entered allmulticast mode Dec 28 11:32:28 managed-node2 kernel: veth0: entered promiscuous mode Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.6958] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:28 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7019] device (veth0): carrier: link connected Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7045] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7051] device (podman1): carrier: link connected Dec 28 11:32:28 managed-node2 (udev-worker)[26529]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:28 managed-node2 (udev-worker)[26530]: Network interface NamePolicy= disabled on kernel command line. 
Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7571] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7648] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7771] device (podman1): Activation: starting connection 'podman1' (8a968ede-a335-4ce0-8911-ebaeb783f432) Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7785] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7788] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7794] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.7802] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2076. Dec 28 11:32:28 managed-node2 systemd[1]: Started run-rcc96bc2b686642e3b1e916b7556e810c.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-rcc96bc2b686642e3b1e916b7556e810c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rcc96bc2b686642e3b1e916b7556e810c.scope has finished successfully. ░░ ░░ The job identifier is 2155. Dec 28 11:32:28 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2076. Dec 28 11:32:28 managed-node2 aardvark-dns[26555]: starting aardvark on a child with pid 26560 Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Successfully parsed config Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Listen v6 ip {} Dec 28 11:32:28 managed-node2 aardvark-dns[26560]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.8129] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.8131] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 28 11:32:28 managed-node2 NetworkManager[780]: [1735403548.8135] device (podman1): Activation: successful, device activated. 
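
NetworkManager is only assuming the bridge, not managing it: every podman1 state change above is tagged managed-type: 'external', so NM records the device netavark created but leaves its addressing alone. This typically shows up as an externally connected device in:

    $ nmcli device status
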
Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope. ░░ Subject: A start job for unit libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully. ░░ ░░ The job identifier is 2161. Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : terminal_ctrl_fd: 13 Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : winsz read side: 17, winsz write side: 18 Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope - libcrun container. ░░ Subject: A start job for unit libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has finished successfully. ░░ ░░ The job identifier is 2168. Dec 28 11:32:28 managed-node2 conmon[26570]: conmon 25518e805d2160757e9f : container PID: 26572 Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.886829431 -0500 EST m=+0.323127569 container init 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.89009851 -0500 EST m=+0.326396555 container start 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope. ░░ Subject: A start job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully. ░░ ░░ The job identifier is 2175. Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : terminal_ctrl_fd: 12 Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : winsz read side: 16, winsz write side: 17 Dec 28 11:32:28 managed-node2 systemd[1]: Started libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope - libcrun container. ░░ Subject: A start job for unit libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished successfully. 
░░ ░░ The job identifier is 2182. Dec 28 11:32:28 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : container PID: 26577 Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.944782138 -0500 EST m=+0.381080298 container init 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.950731488 -0500 EST m=+0.387029633 container start 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:32:28 managed-node2 podman[26514]: 2024-12-28 11:32:28.955422911 -0500 EST m=+0.391720957 pod start 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 Container: 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-12-28T11:32:28-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2024-12-28T11:32:28-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-28T11:32:28-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-12-28T11:32:28-05:00" level=info msg="Using sqlite as database backend" time="2024-12-28T11:32:28-05:00" level=debug msg="Using graph driver overlay" time="2024-12-28T11:32:28-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Using run root /run/containers/storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2024-12-28T11:32:28-05:00" level=debug msg="Using tmp dir /run/libpod" time="2024-12-28T11:32:28-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2024-12-28T11:32:28-05:00" level=debug msg="Using transient store: false" time="2024-12-28T11:32:28-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2024-12-28T11:32:28-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for 
building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2024-12-28T11:32:28-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2024-12-28T11:32:28-05:00" level=debug msg="Initializing event backend journald" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-12-28T11:32:28-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2024-12-28T11:32:28-05:00" level=info msg="Setting parallel job count to 7" time="2024-12-28T11:32:28-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 65c443db6f7e5a9e1793416c0c5d9e9890e40c9a51785023cf18dc8be7eae490 bridge podman1 2024-12-28 11:30:09.866191781 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-12-28T11:32:28-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
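
Worth comparing with the rootless run earlier: as root the graph root is /var/lib/containers/storage, the run root is /run/containers/storage, events go to journald rather than a file, and metacopy is enabled on the overlay mounts. Whichever user invokes podman, the active locations are reported by:

    $ podman info --format 'graph root: {{.Store.GraphRoot}}  run root: {{.Store.RunRoot}}'
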
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Pod using bridge network mode" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice for parent machine.slice and name libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327" time="2024-12-28T11:32:28-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:28-05:00" level=debug msg="setting container name 1b721eddd0ec-infra" time="2024-12-28T11:32:28-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Allocated lock 1 for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:6c58b1756bb7cf9553687745e2a74f39255a6385ad87c23e911e6951449ed327\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\" has work directory \"/var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\" has run directory \"/run/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-12-28T11:32:28-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-12-28T11:32:28-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-12-28T11:32:28-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-12-28T11:32:28-05:00" level=debug msg="using systemd mode: false" time="2024-12-28T11:32:28-05:00" level=debug msg="adding container to pod httpd2" time="2024-12-28T11:32:28-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2024-12-28T11:32:28-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-12-28T11:32:28-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /proc" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /dev" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /dev/pts" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /sys" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-12-28T11:32:28-05:00" level=debug msg="Allocated lock 2 for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\" has work directory \"/var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\" has run directory \"/run/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Strongconnecting node 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="Pushed 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f onto stack" time="2024-12-28T11:32:28-05:00" level=debug msg="Finishing node 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f. Popped 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f off stack" time="2024-12-28T11:32:28-05:00" level=debug msg="Strongconnecting node 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="Pushed 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd onto stack" time="2024-12-28T11:32:28-05:00" level=debug msg="Finishing node 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd. 
Popped 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd off stack"
time="2024-12-28T11:32:28-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/GICPFZE4VD52VXKDKOFWFG6QWF,upperdir=/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/diff,workdir=/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c109,c131\""
time="2024-12-28T11:32:28-05:00" level=debug msg="Made network namespace at /run/netns/netns-fe725b46-5a81-c356-183a-5f1b3e306c82 for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f"
time="2024-12-28T11:32:28-05:00" level=debug msg="Mounted container \"25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f\" at \"/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/merged\""
time="2024-12-28T11:32:28-05:00" level=debug msg="Created root filesystem for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f at /var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/merged"
[DEBUG netavark::network::validation] Validating network namespace...
[DEBUG netavark::commands::setup] Setting up...
[INFO netavark::firewall] Using nftables firewall driver
[DEBUG netavark::network::bridge] Setup network podman-default-kube-network
[DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24]
[DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24]
[DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1
[DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2
[DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0
[DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1
[DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2
[INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100)
[DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24
[DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source
[INFO netavark::firewall::nft] Creating container chain nv_65c443db_10_89_0_0_nm24
[DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1
[DEBUG netavark::dns::aardvark] Spawning aardvark server
[DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"]
[DEBUG netavark::commands::setup] {
    "podman-default-kube-network": StatusBlock {
        dns_search_domains: Some(
            [
                "dns.podman",
            ],
        ),
        dns_server_ips: Some(
            [
                10.89.0.1,
            ],
        ),
        interfaces: Some(
            {
                "eth0": NetInterface {
                    mac_address: "aa:f3:08:81:92:ab",
                    subnets: Some(
                        [
                            NetAddress {
                                gateway: Some(
                                    10.89.0.1,
                                ),
                                ipnet: 10.89.0.2/24,
                            },
                        ],
                    ),
                },
            },
        ),
    },
}
[DEBUG netavark::commands::setup] Setup complete
time="2024-12-28T11:32:28-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts"
time="2024-12-28T11:32:28-05:00" level=debug msg="Setting Cgroups for container
25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f to machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice:libpod:25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:28-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04/merged\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Created OCI spec for container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f at /var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/config.json" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice for parent machine.slice and name libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:28-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f -u 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata -p /run/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/pidfile -n 1b721eddd0ec-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f]" time="2024-12-28T11:32:28-05:00" level=info msg="Running conmon under slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice and unitName libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope" time="2024-12-28T11:32:28-05:00" level=debug msg="Received: 26572" time="2024-12-28T11:32:28-05:00" level=info msg="Got Conmon PID as 26570" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f in OCI runtime" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-12-28T11:32:28-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-12-28T11:32:28-05:00" level=debug msg="Starting container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f with command [/catatonit -P]" time="2024-12-28T11:32:28-05:00" level=debug msg="Started container 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f" time="2024-12-28T11:32:28-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/QVFPLMREPESOATKWX6MZW2HMBA,upperdir=/var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/diff,workdir=/var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c109,c131\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Mounted container \"5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd\" at \"/var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/merged\"" time="2024-12-28T11:32:28-05:00" level=debug msg="Created root filesystem for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd at /var/lib/containers/storage/overlay/a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e/merged" time="2024-12-28T11:32:28-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2024-12-28T11:32:28-05:00" level=debug msg="Setting Cgroups for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd to machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice:libpod:5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-12-28T11:32:28-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-12-28T11:32:28-05:00" level=debug msg="Created OCI spec for container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd at /var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/config.json" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice for parent machine.slice and name libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4" time="2024-12-28T11:32:28-05:00" level=debug msg="Created cgroup 
machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice" time="2024-12-28T11:32:28-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-12-28T11:32:28-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd -u 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata -p /run/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd]" time="2024-12-28T11:32:28-05:00" level=info msg="Running conmon under slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice and unitName libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope" time="2024-12-28T11:32:28-05:00" level=debug msg="Received: 26577" time="2024-12-28T11:32:28-05:00" level=info msg="Got Conmon PID as 26575" time="2024-12-28T11:32:28-05:00" level=debug msg="Created container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd in OCI runtime" time="2024-12-28T11:32:28-05:00" level=debug msg="Starting container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd with command [/bin/busybox-extras httpd -f -p 80]" time="2024-12-28T11:32:28-05:00" level=debug msg="Started container 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd" time="2024-12-28T11:32:28-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug 
/etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-12-28T11:32:28-05:00" level=debug msg="Shutting down engines" time="2024-12-28T11:32:28-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26514 Dec 28 11:32:28 managed-node2 python3.12[26506]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Dec 28 11:32:29 managed-node2 python3.12[26709]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:32:29 managed-node2 systemd[1]: Reload requested from client PID 26710 ('systemctl') (unit session-5.scope)... Dec 28 11:32:29 managed-node2 systemd[1]: Reloading... Dec 28 11:32:29 managed-node2 systemd[1]: Reloading finished in 209 ms. Dec 28 11:32:30 managed-node2 python3.12[26896]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 28 11:32:30 managed-node2 systemd[1]: Reload requested from client PID 26899 ('systemctl') (unit session-5.scope)... Dec 28 11:32:30 managed-node2 systemd[1]: Reloading... Dec 28 11:32:30 managed-node2 systemd[1]: Reloading finished in 209 ms. Dec 28 11:32:31 managed-node2 python3.12[27085]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:32:31 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2267. Dec 28 11:32:31 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2189. Dec 28 11:32:31 managed-node2 podman[27089]: 2024-12-28 11:32:31.330130986 -0500 EST m=+0.026544264 pod stop 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:31 managed-node2 systemd[1]: libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has successfully entered the 'dead' state. 
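The unit instance name podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service seen above is simply the systemd-escaped form of the kube YAML path (the same escaping the test invokes explicitly for httpd3 further down). It can be reproduced directly:

  # '/' becomes '-' and a literal '-' in the path becomes \x2d
  systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
  # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service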
Dec 28 11:32:31 managed-node2 podman[27089]: 2024-12-28 11:32:31.359740965 -0500 EST m=+0.056154206 container died 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, io.buildah.version=1.38.0)
Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Received SIGHUP
Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Successfully parsed config
Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Listen v4 ip {}
Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: Listen v6 ip {}
Dec 28 11:32:31 managed-node2 aardvark-dns[26560]: No configuration found, stopping the server
Dec 28 11:32:31 managed-node2 systemd[1]: run-rcc96bc2b686642e3b1e916b7556e810c.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-rcc96bc2b686642e3b1e916b7556e810c.scope has successfully entered the 'dead' state.
Dec 28 11:32:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state
Dec 28 11:32:31 managed-node2 kernel: veth0 (unregistering): left allmulticast mode
Dec 28 11:32:31 managed-node2 kernel: veth0 (unregistering): left promiscuous mode
Dec 28 11:32:31 managed-node2 kernel: podman1: port 1(veth0) entered disabled state
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f)"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Setting custom database backend: \"sqlite\""
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Using sqlite as database backend"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using graph driver overlay"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using graph root /var/lib/containers/storage"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using run root /run/containers/storage"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using tmp dir /run/libpod"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using transient store: false"
Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Dec 28 11:32:31
managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Initializing event backend journald" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Setting parallel job count to 7" Dec 28 11:32:31 managed-node2 NetworkManager[780]: [1735403551.4070] device 
(podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Dec 28 11:32:31 managed-node2 systemd[1]: run-netns-netns\x2dfe725b46\x2d5a81\x2dc356\x2d183a\x2d5f1b3e306c82.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dfe725b46\x2d5a81\x2dc356\x2d183a\x2d5f1b3e306c82.mount has successfully entered the 'dead' state. Dec 28 11:32:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f-userdata-shm.mount has successfully entered the 'dead' state. Dec 28 11:32:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay-818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-818c396c5e7f0de543197f68d2ce0b2347698f88e8e3f2cd4106b3b20a723b04-merged.mount has successfully entered the 'dead' state. Dec 28 11:32:31 managed-node2 podman[27089]: 2024-12-28 11:32:31.574954928 -0500 EST m=+0.271367877 container cleanup 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f)" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=debug msg="Shutting down engines" Dec 28 11:32:31 managed-node2 /usr/bin/podman[27099]: time="2024-12-28T11:32:31-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27099 Dec 28 11:32:31 managed-node2 systemd[1]: libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f.scope has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 podman[27089]: time="2024-12-28T11:32:41-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Dec 28 11:32:41 managed-node2 conmon[26575]: conmon 5ef0ceeadc21eec1e469 : container 26577 exited with status 137 Dec 28 11:32:41 managed-node2 systemd[1]: libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.357634219 -0500 EST m=+10.054047352 container died 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd)" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Using sqlite as database backend" Dec 28 11:32:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay-a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-a69fa24142a756260d434ecb147f43723beffc2cf7512f043ff07c57fe5d100e-merged.mount has successfully entered the 'dead' state. 
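The SIGKILL warning and the 137 exit status above are the normal stop sequence for this image: podman sends StopSignal (SIGTERM), waits the 10-second timeout, then kills the container, and 137 = 128 + 9 (SIGKILL's signal number). Because the busybox-extras httpd runs as PID 1 inside the container, SIGTERM has no default disposition for it, so the escalation is expected. A hedged sketch of reproducing it manually:

  # SIGTERM, up to 10 s grace, then SIGKILL -- mirrors the timeout in the log
  podman stop --time 10 httpd2-httpd2
  podman inspect --format '{{.State.ExitCode}}' httpd2-httpd2   # expect 137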
Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using graph driver overlay" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using run root /run/containers/storage" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using tmp dir /run/libpod" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using transient store: false" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that overlay is supported" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that metacopy is being used" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Initializing event backend journald" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: 
time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Setting parallel job count to 7" Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.387589372 -0500 EST m=+10.084002321 container cleanup 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=27122 Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=27122 Dec 28 11:32:41 managed-node2 systemd[1]: Stopping libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope... ░░ Subject: A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has begun execution. ░░ ░░ The job identifier is 2275. Dec 28 11:32:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 /usr/bin/podman[27122]: time="2024-12-28T11:32:41-05:00" level=debug msg="Completed shutdown handler \"libpod\", duration 0s" PID=27122 Dec 28 11:32:41 managed-node2 systemd[1]: libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 systemd[1]: Stopped libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope. 
░░ Subject: A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd.scope has finished. ░░ ░░ The job identifier is 2275 and the job result is done. Dec 28 11:32:41 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Dec 28 11:32:41 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice - cgroup machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice. ░░ Subject: A stop job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice has finished. ░░ ░░ The job identifier is 2274 and the job result is done. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.463051907 -0500 EST m=+10.159464865 container remove 5ef0ceeadc21eec1e46993767f9703f6b4a63dbffc2400400ae5ce80b1e5c0fd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.483585524 -0500 EST m=+10.179998492 container remove 25518e805d2160757e9f2990de43a98aab195afca4ad958e5eec9b5df8d2f75f (image=localhost/podman-pause:5.3.1-1733097600, name=1b721eddd0ec-infra, pod_id=1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 systemd[1]: machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice: Failed to open /run/systemd/transient/machine-libpod_pod_1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4.slice: No such file or directory Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.491008144 -0500 EST m=+10.187421090 pod remove 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 (image=, name=httpd2) Dec 28 11:32:41 managed-node2 podman[27089]: Pods stopped: Dec 28 11:32:41 managed-node2 podman[27089]: 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 Dec 28 11:32:41 managed-node2 podman[27089]: Pods removed: Dec 28 11:32:41 managed-node2 podman[27089]: 1b721eddd0ec23b85457c4382f88528d5682dc939b09e383e9bb47744651a4a4 Dec 28 11:32:41 managed-node2 podman[27089]: Secrets removed: Dec 28 11:32:41 managed-node2 podman[27089]: Volumes removed: Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.510438857 -0500 EST m=+10.206851804 container create 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 systemd[1]: Created slice 
machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice - cgroup machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice. ░░ Subject: A start job for unit machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e.slice has finished successfully. ░░ ░░ The job identifier is 2276. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.550371276 -0500 EST m=+10.246784226 container create d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166 (image=localhost/podman-pause:5.3.1-1733097600, name=c97b04ddc09a-infra, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.554904013 -0500 EST m=+10.251317051 pod create c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e (image=, name=httpd2) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.579200826 -0500 EST m=+10.275613776 container create 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.579621257 -0500 EST m=+10.276034232 container restart 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.557275503 -0500 EST m=+10.253688533 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:41 managed-node2 systemd[1]: Started libpod-56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99.scope - libcrun container. ░░ Subject: A start job for unit libpod-56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99.scope has finished successfully. ░░ ░░ The job identifier is 2282. 
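Taken together, the "Pods stopped/removed" summary and the immediate re-creation above show the unit replacing the ad-hoc pod that podman_play created earlier, this time with a <id>-service container that ties the pod's lifetime to the unit. This matches kube play's --replace and --service-container options; roughly (a sketch of the shape of command the podman-kube@ template runs, not copied from this system):

  # replace any pod from a previous run and add a service container
  podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd2.yml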
Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.643666408 -0500 EST m=+10.340079707 container init 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.646444337 -0500 EST m=+10.342857482 container start 56d61cd416db0e9873a410b57624e63df4696e3c814b658c5fab90f30567fc99 (image=localhost/podman-pause:5.3.1-1733097600, name=001056497135-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6591] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Dec 28 11:32:41 managed-node2 kernel: veth0: entered allmulticast mode Dec 28 11:32:41 managed-node2 kernel: veth0: entered promiscuous mode Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Dec 28 11:32:41 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6690] device (podman1): carrier: link connected Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6700] device (veth0): carrier: link connected Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.6704] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Dec 28 11:32:41 managed-node2 (udev-worker)[27143]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:41 managed-node2 (udev-worker)[27142]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7162] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7168] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7173] device (podman1): Activation: starting connection 'podman1' (02db41d1-c81c-4f7f-9817-7016876fce31) Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7194] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7197] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7198] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7202] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2289. 
Dec 28 11:32:41 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2289. Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7628] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7631] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Dec 28 11:32:41 managed-node2 NetworkManager[780]: [1735403561.7639] device (podman1): Activation: successful, device activated. Dec 28 11:32:41 managed-node2 systemd[1]: Started run-rbc08a3c205e64d89baf5e64d1be58554.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-rbc08a3c205e64d89baf5e64d1be58554.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rbc08a3c205e64d89baf5e64d1be58554.scope has finished successfully. ░░ ░░ The job identifier is 2368. Dec 28 11:32:41 managed-node2 systemd[1]: Started libpod-d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166.scope - libcrun container. ░░ Subject: A start job for unit libpod-d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166.scope has finished successfully. ░░ ░░ The job identifier is 2374. Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.814913125 -0500 EST m=+10.511326328 container init d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166 (image=localhost/podman-pause:5.3.1-1733097600, name=c97b04ddc09a-infra, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.817516902 -0500 EST m=+10.513929917 container start d288a7e886467ea1ffebae2e700b222365dbe90c66b316c94e95523d719d4166 (image=localhost/podman-pause:5.3.1-1733097600, name=c97b04ddc09a-infra, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Dec 28 11:32:41 managed-node2 systemd[1]: Started libpod-3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec.scope - libcrun container. ░░ Subject: A start job for unit libpod-3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec.scope has finished successfully. ░░ ░░ The job identifier is 2381. 
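DNS for the bridge is served by aardvark-dns, which netavark launches as the transient scope seen above (run-rbc08a3c205e64d89baf5e64d1be58554.scope). The exec array logged during network setup corresponds to this invocation, normally issued by netavark rather than by hand:

  systemd-run -q --scope /usr/libexec/podman/aardvark-dns \
      --config /run/containers/networks/aardvark-dns -p 53 run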
Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.857040889 -0500 EST m=+10.553453868 container init 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.859993687 -0500 EST m=+10.556406630 container start 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:32:41 managed-node2 podman[27089]: 2024-12-28 11:32:41.864704183 -0500 EST m=+10.561117145 pod start c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e (image=, name=httpd2) Dec 28 11:32:41 managed-node2 podman[27089]: Pod: Dec 28 11:32:41 managed-node2 podman[27089]: c97b04ddc09a709b3b82b4754e25707e53c798f96eae35d8706b4bb401e0c41e Dec 28 11:32:41 managed-node2 podman[27089]: Container: Dec 28 11:32:41 managed-node2 podman[27089]: 3b2e8426a10791b4eaa8a2ab99467b23b24ac42ebce39ee71c99979595bc23ec Dec 28 11:32:41 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2189. 
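At this point the httpd2 pod is running under the podman-kube@.service template, whose instance name is the systemd-escaped path of the kube YAML file. A sketch of the same pattern done by hand, mirroring the systemd-escape call logged just below for httpd3 (the log enables and starts the unit in two steps; enable --now combines them):

  unit=$(systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml)
  systemctl enable --now "$unit"   # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service
  systemctl status "$unit"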
Dec 28 11:32:42 managed-node2 python3.12[27323]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:43 managed-node2 python3.12[27456]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:32:44 managed-node2 python3.12[27588]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:45 managed-node2 python3.12[27719]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:46 managed-node2 podman[27880]: 2024-12-28 11:32:46.66606436 -0500 EST m=+0.856555194 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:47 managed-node2 python3.12[28026]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:32:47 managed-node2 python3.12[28157]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:48 managed-node2 python3.12[28288]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:32:48 managed-node2 python3.12[28393]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403567.8478522-16492-120957427058681/.source.yml _original_basename=.alralwvc follow=False checksum=7d6c191d6b3239df2f07df7943d4f739ea9dd879 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:32:48 managed-node2 python3.12[28524]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Dec 28 11:32:49 managed-node2 systemd[1]: Created slice machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice - cgroup machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice. ░░ Subject: A start job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished successfully. ░░ ░░ The job identifier is 2388. Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.041754705 -0500 EST m=+0.060646134 container create 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.046650062 -0500 EST m=+0.065541475 pod create 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.070972773 -0500 EST m=+0.089864372 container create 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.048760037 -0500 EST m=+0.067651582 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:32:49 managed-node2 kernel: veth1: entered allmulticast mode Dec 28 11:32:49 managed-node2 kernel: veth1: entered promiscuous mode Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:32:49 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 28 11:32:49 managed-node2 NetworkManager[780]: [1735403569.1130] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Dec 28 11:32:49 managed-node2 NetworkManager[780]: [1735403569.1143] device (veth1): carrier: link connected Dec 28 11:32:49 managed-node2 (udev-worker)[28547]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope. 
░░ Subject: A start job for unit libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully. ░░ ░░ The job identifier is 2395. Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope - libcrun container. ░░ Subject: A start job for unit libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has finished successfully. ░░ ░░ The job identifier is 2402. Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.213656897 -0500 EST m=+0.232548524 container init 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.217547099 -0500 EST m=+0.236438575 container start 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope. ░░ Subject: A start job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully. ░░ ░░ The job identifier is 2409. Dec 28 11:32:49 managed-node2 systemd[1]: Started libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope - libcrun container. ░░ Subject: A start job for unit libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished successfully. ░░ ░░ The job identifier is 2416. 
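Each container runs in its own libpod-<id>.scope (the libcrun process) with a paired libpod-conmon-<id>.scope for its conmon monitor, both parented under the pod's machine-libpod_pod_<id>.slice created above. A sketch for viewing that cgroup layout on such a host:

  systemd-cgls -u machine.slice                       # pod slices with their libpod-*.scope children
  systemctl list-units 'libpod-*.scope' --no-legend   # one scope per running container
  podman ps --pod                                     # containers together with their pod names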
Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.277304737 -0500 EST m=+0.296196201 container init 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.279831494 -0500 EST m=+0.298723124 container start 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:32:49 managed-node2 podman[28532]: 2024-12-28 11:32:49.284418309 -0500 EST m=+0.303309727 pod start 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:32:49 managed-node2 python3.12[28711]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:32:49 managed-node2 systemd[1]: Reload requested from client PID 28712 ('systemctl') (unit session-5.scope)... Dec 28 11:32:49 managed-node2 systemd[1]: Reloading... Dec 28 11:32:50 managed-node2 systemd[1]: Reloading finished in 221 ms. Dec 28 11:32:50 managed-node2 python3.12[28899]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Dec 28 11:32:50 managed-node2 systemd[1]: Reload requested from client PID 28902 ('systemctl') (unit session-5.scope)... Dec 28 11:32:50 managed-node2 systemd[1]: Reloading... Dec 28 11:32:51 managed-node2 systemd[1]: Reloading finished in 220 ms. Dec 28 11:32:51 managed-node2 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab... ░░ Subject: A start job for unit fstrim.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has begun execution. ░░ ░░ The job identifier is 2423. Dec 28 11:32:51 managed-node2 systemd[1]: fstrim.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit fstrim.service has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab. ░░ Subject: A start job for unit fstrim.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has finished successfully. ░░ ░░ The job identifier is 2423. Dec 28 11:32:51 managed-node2 python3.12[29091]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:32:51 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... 
░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2501. Dec 28 11:32:51 managed-node2 podman[29095]: 2024-12-28 11:32:51.738851481 -0500 EST m=+0.024762631 pod stop 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:32:51 managed-node2 systemd[1]: libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 podman[29095]: 2024-12-28 11:32:51.76132352 -0500 EST m=+0.047234619 container died 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, io.buildah.version=1.38.0) Dec 28 11:32:51 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:51 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Dec 28 11:32:51 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Dec 28 11:32:51 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:32:51 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: run-netns-netns\x2d363d34ef\x2dd6a9\x2d26ce\x2d8aaf\x2da1ed375e8f80.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d363d34ef\x2dd6a9\x2d26ce\x2d8aaf\x2da1ed375e8f80.mount has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a-userdata-shm.mount has successfully entered the 'dead' state. Dec 28 11:32:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay-24cbbe9b3e67065b5840bc0768118c6456d294ea9f22e71509544de0674a5376-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-24cbbe9b3e67065b5840bc0768118c6456d294ea9f22e71509544de0674a5376-merged.mount has successfully entered the 'dead' state. 
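Restarting the unit first stops the existing httpd3 pod: the infra container exits promptly and its network namespace and overlay mounts are torn down, but, as the next entries show, the application container does not exit on SIGTERM within the default 10-second grace period, so podman falls back to SIGKILL. A sketch of performing or tuning that teardown directly (the timeout value is illustrative):

  podman pod stop --time 30 httpd3                                   # wait longer before escalating to SIGKILL
  podman kube down /etc/containers/ansible-kubernetes.d/httpd3.yml   # declarative teardown of the same workload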
Dec 28 11:32:51 managed-node2 podman[29095]: 2024-12-28 11:32:51.838200317 -0500 EST m=+0.124111426 container cleanup 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:32:51 managed-node2 systemd[1]: libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a.scope has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 podman[29095]: time="2024-12-28T11:33:01-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Dec 28 11:33:01 managed-node2 systemd[1]: libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 conmon[28577]: conmon 60498560b41dfa16aeb6 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice/libpod-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope/container/memory.events Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.780253993 -0500 EST m=+10.066165205 container died 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:33:01 managed-node2 systemd[1]: var-lib-containers-storage-overlay-748adaeccd2f2432451a853a5f336086cf212ebf7a14e44d566db9416d6c1044-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-748adaeccd2f2432451a853a5f336086cf212ebf7a14e44d566db9416d6c1044-merged.mount has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.81589406 -0500 EST m=+10.101805167 container cleanup 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Dec 28 11:33:01 managed-node2 systemd[1]: libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has successfully entered the 'dead' state. Dec 28 11:33:01 managed-node2 systemd[1]: Stopped libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope. 
░░ Subject: A stop job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7.scope has finished. ░░ ░░ The job identifier is 2587 and the job result is done. Dec 28 11:33:01 managed-node2 systemd[1]: Removed slice machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice - cgroup machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice. ░░ Subject: A stop job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice has finished. ░░ ░░ The job identifier is 2586 and the job result is done. Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.874667873 -0500 EST m=+10.160578980 container remove 60498560b41dfa16aeb6bf24d8eff5ba41cc725aee1bbe7364db3bfe4c12eea7 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.899163625 -0500 EST m=+10.185074743 container remove 4873108eff2c58e99cb91a0081b1e34baa0c9a509af1ffe1e985955af23b446a (image=localhost/podman-pause:5.3.1-1733097600, name=70c479aaa7d2-infra, pod_id=70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1, io.buildah.version=1.38.0) Dec 28 11:33:01 managed-node2 systemd[1]: machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice: Failed to open /run/systemd/transient/machine-libpod_pod_70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1.slice: No such file or directory Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.90814883 -0500 EST m=+10.194059937 pod remove 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 (image=, name=httpd3) Dec 28 11:33:01 managed-node2 podman[29095]: Pods stopped: Dec 28 11:33:01 managed-node2 podman[29095]: 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 Dec 28 11:33:01 managed-node2 podman[29095]: Pods removed: Dec 28 11:33:01 managed-node2 podman[29095]: 70c479aaa7d29560efc0a379f09d6a7859e5fae543d0662f7efae14e634ec9e1 Dec 28 11:33:01 managed-node2 podman[29095]: Secrets removed: Dec 28 11:33:01 managed-node2 podman[29095]: Volumes removed: Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.931505206 -0500 EST m=+10.217416321 container create 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:01 managed-node2 systemd[1]: Created slice machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice - cgroup machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice. 
░░ Subject: A start job for unit machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39.slice has finished successfully. ░░ ░░ The job identifier is 2588. Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.96964967 -0500 EST m=+10.255560855 container create 4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457 (image=localhost/podman-pause:5.3.1-1733097600, name=00b5192a1328-infra, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.974886751 -0500 EST m=+10.260797856 pod create 00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39 (image=, name=httpd3) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.977143449 -0500 EST m=+10.263054644 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.99926819 -0500 EST m=+10.285179388 container create aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z) Dec 28 11:33:01 managed-node2 podman[29095]: 2024-12-28 11:33:01.999650548 -0500 EST m=+10.285561668 container restart 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 systemd[1]: Started libpod-0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4.scope - libcrun container. ░░ Subject: A start job for unit libpod-0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4.scope has finished successfully. ░░ ░░ The job identifier is 2594. 
Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.045936814 -0500 EST m=+10.331847983 container init 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.049529032 -0500 EST m=+10.335440423 container start 0a5a42555773a6a24c728f35a16f2572cb7591c3fa4f35470106214125b1e4a4 (image=localhost/podman-pause:5.3.1-1733097600, name=61ef7cf78877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Dec 28 11:33:02 managed-node2 kernel: veth1: entered allmulticast mode Dec 28 11:33:02 managed-node2 kernel: veth1: entered promiscuous mode Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Dec 28 11:33:02 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Dec 28 11:33:02 managed-node2 NetworkManager[780]: [1735403582.0775] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Dec 28 11:33:02 managed-node2 NetworkManager[780]: [1735403582.0793] device (veth1): carrier: link connected Dec 28 11:33:02 managed-node2 (udev-worker)[29142]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:33:02 managed-node2 systemd[1]: Started libpod-4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457.scope - libcrun container. ░░ Subject: A start job for unit libpod-4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457.scope has finished successfully. ░░ ░░ The job identifier is 2601. Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.17052525 -0500 EST m=+10.456436441 container init 4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457 (image=localhost/podman-pause:5.3.1-1733097600, name=00b5192a1328-infra, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.173125408 -0500 EST m=+10.459036745 container start 4c761524f51e15bb369fc3dcac51e16b446754a6acd21c6d75d67030850a4457 (image=localhost/podman-pause:5.3.1-1733097600, name=00b5192a1328-infra, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Dec 28 11:33:02 managed-node2 systemd[1]: Started libpod-aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b.scope - libcrun container. ░░ Subject: A start job for unit libpod-aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b.scope has finished successfully. ░░ ░░ The job identifier is 2608. 
Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.214681625 -0500 EST m=+10.500592798 container init aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.217084923 -0500 EST m=+10.502996095 container start aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Dec 28 11:33:02 managed-node2 podman[29095]: 2024-12-28 11:33:02.222187238 -0500 EST m=+10.508098487 pod start 00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39 (image=, name=httpd3) Dec 28 11:33:02 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2501. Dec 28 11:33:02 managed-node2 podman[29095]: Pod: Dec 28 11:33:02 managed-node2 podman[29095]: 00b5192a1328a4a5c28ba51376534cc9be9dfdcfb70812f018aa8b3e73e32f39 Dec 28 11:33:02 managed-node2 podman[29095]: Container: Dec 28 11:33:02 managed-node2 podman[29095]: aa8d45554bfc3f5853e11e2b0ee0deb35b2207362cf9dd3437195c9fa9aa325b Dec 28 11:33:02 managed-node2 sudo[29347]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-htdnlxjuuhpujmowyetwqvegbdirbcnl ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403582.6765993-17118-157836660933400/AnsiballZ_command.py' Dec 28 11:33:02 managed-node2 sudo[29347]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29347) opened. Dec 28 11:33:02 managed-node2 sudo[29347]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:33:03 managed-node2 python3.12[29350]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:03 managed-node2 systemd[23102]: Started podman-29358.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. 
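With all three pods restarted, the test verifies them through both podman and systemd, as the following entries show. A condensed sketch of those checks; curl stands in for the ansible.builtin.uri calls, and the host port comes from the kube YAML files, which are not captured in this log:

  podman pod inspect httpd3 --format '{{.State}}'   # expect Running
  systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '
  curl -s http://localhost:15001/index.txt          # a port published by one of the pods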
Dec 28 11:33:03 managed-node2 sudo[29347]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:33:03 managed-node2 python3.12[29498]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:03 managed-node2 python3.12[29637]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:04 managed-node2 sudo[29819]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nhkiqonpbohiabdlrjdubfbcitdnmczm ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1735403584.0463102-17178-155139140028051/AnsiballZ_command.py' Dec 28 11:33:04 managed-node2 sudo[29819]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29819) opened. Dec 28 11:33:04 managed-node2 sudo[29819]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Dec 28 11:33:04 managed-node2 python3.12[29822]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:04 managed-node2 sudo[29819]: pam_unix(sudo:session): session closed for user podman_basic_user Dec 28 11:33:04 managed-node2 python3.12[29956]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:05 managed-node2 python3.12[30090]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:06 managed-node2 python3.12[30224]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:06 managed-node2 python3.12[30357]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget 
use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:06 managed-node2 python3.12[30488]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:07 managed-node2 python3.12[30620]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:07 managed-node2 python3.12[30751]: ansible-file Invoked with path=/tmp/lsr_ib6aykrx_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:10 managed-node2 python3.12[30925]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Dec 28 11:33:11 managed-node2 python3.12[31098]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:12 managed-node2 python3.12[31229]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:33:14 managed-node2 python3.12[31365]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:33:15 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Dec 28 11:33:15 managed-node2 dbus-broker-launch[23576]: Noticed file-system modification, trigger reload.
Dec 28 11:33:15 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload.
Dec 28 11:33:15 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Dec 28 11:33:15 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Dec 28 11:33:16 managed-node2 dbus-broker-launch[615]: Noticed file-system modification, trigger reload.
Dec 28 11:33:16 managed-node2 dbus-broker-launch[23576]: Noticed file-system modification, trigger reload.
Dec 28 11:33:16 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Dec 28 11:33:16 managed-node2 dbus-broker-launch[23576]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Dec 28 11:33:16 managed-node2 systemd[1]: Reload requested from client PID 31373 ('systemctl') (unit session-5.scope)...
Dec 28 11:33:16 managed-node2 systemd[1]: Reloading...
Dec 28 11:33:16 managed-node2 systemd[1]: Reloading finished in 226 ms.
Dec 28 11:33:16 managed-node2 systemd[1]: Started run-rc1d5f444a1bd4661900e9c55fb0359db.service - /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-rc1d5f444a1bd4661900e9c55fb0359db.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-rc1d5f444a1bd4661900e9c55fb0359db.service has finished successfully.
░░
░░ The job identifier is 2619.
Dec 28 11:33:16 managed-node2 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 2697.
Dec 28 11:33:16 managed-node2 systemd[1]: Reload requested from client PID 31436 ('systemctl') (unit session-5.scope)...
Dec 28 11:33:16 managed-node2 systemd[1]: Reloading...
Dec 28 11:33:16 managed-node2 systemd[1]: Reloading finished in 356 ms.
Dec 28 11:33:16 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units…
Dec 28 11:33:17 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Dec 28 11:33:17 managed-node2 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2697. Dec 28 11:33:17 managed-node2 systemd[1]: run-rc1d5f444a1bd4661900e9c55fb0359db.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rc1d5f444a1bd4661900e9c55fb0359db.service has successfully entered the 'dead' state. Dec 28 11:33:17 managed-node2 python3.12[31630]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:18 managed-node2 python3.12[31761]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:19 managed-node2 python3.12[31892]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 28 11:33:19 managed-node2 systemd[1]: Reload requested from client PID 31895 ('systemctl') (unit session-5.scope)... Dec 28 11:33:19 managed-node2 systemd[1]: Reloading... Dec 28 11:33:19 managed-node2 systemd[1]: Reloading finished in 218 ms. Dec 28 11:33:19 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2775. Dec 28 11:33:19 managed-node2 (rtmonger)[31952]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Dec 28 11:33:19 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2775. 
Dec 28 11:33:20 managed-node2 python3.12[32110]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320
[the previous line repeats, with an identical timestamp, roughly two dozen more times as certmonger steps the request through its states]
Dec 28 11:33:20 managed-node2 rsyslogd[655]: imjournal: journal files changed, reloading... [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ]
Dec 28 11:33:20 managed-node2 certmonger[32126]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
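The certificate role drives certmonger: the request is persisted under /var/lib/certmonger/requests (the repeated "Wrote to" entries above) and the self-signed certificate and key land under /etc/pki/tls. A sketch for inspecting and then releasing the tracked certificate, matching the getcert stop-tracking call that appears later in this log:

  getcert list                                                   # tracked requests and their current status
  openssl x509 -in /etc/pki/tls/certs/quadlet_demo.crt -noout -subject -enddate
  getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt   # stop monitoring before deleting the files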
Dec 28 11:33:20 managed-node2 certmonger[31952]: 2024-12-28 11:33:20 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:21 managed-node2 python3.12[32257]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 28 11:33:21 managed-node2 python3.12[32388]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Dec 28 11:33:22 managed-node2 python3.12[32519]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Dec 28 11:33:22 managed-node2 python3.12[32650]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:22 managed-node2 certmonger[31952]: 2024-12-28 11:33:22 [31952] Wrote to /var/lib/certmonger/requests/20241228163320 Dec 28 11:33:23 managed-node2 python3.12[32782]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:23 managed-node2 python3.12[32913]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:23 managed-node2 python3.12[33044]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:24 managed-node2 python3.12[33175]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:25 managed-node2 python3.12[33306]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:26 managed-node2 python3.12[33568]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:33:27 managed-node2 python3.12[33705]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Dec 28 11:33:28 managed-node2 python3.12[33837]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:30 managed-node2 python3.12[33970]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:31 managed-node2 python3.12[34101]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:31 managed-node2 python3.12[34232]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:33:32 managed-node2 python3.12[34364]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 28 11:33:33 managed-node2 python3.12[34497]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 28 11:33:34 managed-node2 python3.12[34630]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 28 11:33:34 managed-node2 python3.12[34761]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 28 11:33:39 managed-node2 python3.12[35367]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:40 managed-node2 python3.12[35500]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:41 managed-node2 python3.12[35631]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:33:41 managed-node2 python3.12[35736]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403621.1117623-19088-239700523427720/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False 
checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:42 managed-node2 python3.12[35867]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:33:42 managed-node2 systemd[1]: Reload requested from client PID 35868 ('systemctl') (unit session-5.scope)... Dec 28 11:33:42 managed-node2 systemd[1]: Reloading... Dec 28 11:33:42 managed-node2 systemd[1]: Reloading finished in 227 ms. Dec 28 11:33:43 managed-node2 python3.12[36054]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:33:43 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2854. Dec 28 11:33:43 managed-node2 quadlet-demo-network[36058]: systemd-quadlet-demo Dec 28 11:33:43 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2854. Dec 28 11:33:44 managed-node2 python3.12[36196]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:33:46 managed-node2 python3.12[36329]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:46 managed-node2 python3.12[36460]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Dec 28 11:33:47 managed-node2 python3.12[36565]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403626.645213-19345-73721816979131/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:33:47 managed-node2 python3.12[36696]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Dec 28 11:33:47 managed-node2 systemd[1]: Reload requested from client PID 36697 ('systemctl') (unit session-5.scope)... Dec 28 11:33:47 managed-node2 systemd[1]: Reloading... 
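The payload of quadlet-demo.network deployed above is not recorded in the journal (the copy task logs content=NOT_LOGGING_PARAMETER), but its effect is: starting quadlet-demo-network.service creates a podman network named systemd-quadlet-demo. As a hedged sketch, a quadlet .network unit consistent with that output could be as minimal as:

    [Network]
    # With no NetworkName= set, quadlet derives the network name
    # "systemd-<unit basename>", matching the "systemd-quadlet-demo"
    # printed by quadlet-demo-network.service above.
    # Any Subnet=/Gateway= options the real file may set are not
    # visible in this log.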
Dec 28 11:33:48 managed-node2 systemd[1]: Reloading finished in 222 ms.
Dec 28 11:33:48 managed-node2 python3.12[36883]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Dec 28 11:33:48 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service...
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution.
░░
░░ The job identifier is 2938.
Dec 28 11:33:48 managed-node2 podman[36887]: 2024-12-28 11:33:48.7119741 -0500 EST m=+0.024885300 volume create systemd-quadlet-demo-mysql
Dec 28 11:33:48 managed-node2 quadlet-demo-mysql-volume[36887]: systemd-quadlet-demo-mysql
Dec 28 11:33:48 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service.
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully.
░░
░░ The job identifier is 2938.
Dec 28 11:33:49 managed-node2 python3.12[37026]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:33:50 managed-node2 python3.12[37159]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:33:58 managed-node2 podman[37298]: 2024-12-28 11:33:58.149897796 -0500 EST m=+6.600870142 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Dec 28 11:33:58 managed-node2 python3.12[37606]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:33:59 managed-node2 python3.12[37737]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 28 11:33:59 managed-node2 python3.12[37842]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403638.7432506-19770-169391064880553/.source.container _original_basename=.gnx8or29 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:33:59 managed-node2 python3.12[37973]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
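The same pattern applies to the quadlet-demo-mysql.volume unit installed above: its generated quadlet-demo-mysql-volume.service runs a one-shot volume creation on first start, producing the systemd-quadlet-demo-mysql volume seen in the "volume create" event. A hedged sketch of such a .volume file (the real contents are not logged; an empty [Volume] section is already a valid unit):

    [Volume]
    # No VolumeName= given, so quadlet derives
    # "systemd-quadlet-demo-mysql" from the unit file name,
    # matching the volume created in the log above.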
Dec 28 11:33:59 managed-node2 systemd[1]: Reload requested from client PID 37974 ('systemctl') (unit session-5.scope)...
Dec 28 11:33:59 managed-node2 systemd[1]: Reloading...
Dec 28 11:34:00 managed-node2 systemd[1]: Reloading finished in 221 ms.
Dec 28 11:34:00 managed-node2 python3.12[38160]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Dec 28 11:34:00 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 3022.
Dec 28 11:34:00 managed-node2 podman[38164]: 2024-12-28 11:34:00.770004415 -0500 EST m=+0.045043734 container create 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.7952] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9)
Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered blocking state
Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered disabled state
Dec 28 11:34:00 managed-node2 kernel: veth2: entered allmulticast mode
Dec 28 11:34:00 managed-node2 kernel: veth2: entered promiscuous mode
Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered blocking state
Dec 28 11:34:00 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8108] device (veth2): carrier: link connected
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8120] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10)
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8132] device (podman2): carrier: link connected
Dec 28 11:34:00 managed-node2 (udev-worker)[38180]: Network interface NamePolicy= disabled on kernel command line.
Dec 28 11:34:00 managed-node2 (udev-worker)[38179]: Network interface NamePolicy= disabled on kernel command line.
Dec 28 11:34:00 managed-node2 podman[38164]: 2024-12-28 11:34:00.750797382 -0500 EST m=+0.025836842 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8602] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8632] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8657] device (podman2): Activation: starting connection 'podman2' (171d65dc-0cba-40a2-bbb1-88ff2227a67f)
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8675] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8683] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external')
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8702] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external')
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.8711] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Dec 28 11:34:00 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 3109.
Dec 28 11:34:00 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 3109.
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.9046] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.9052] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Dec 28 11:34:00 managed-node2 NetworkManager[780]: [1735403640.9061] device (podman2): Activation: successful, device activated.
Dec 28 11:34:00 managed-node2 systemd[1]: Started 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer - /usr/bin/podman healthcheck run 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca.
░░ Subject: A start job for unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer has finished successfully.
░░
░░ The job identifier is 3188.
Dec 28 11:34:00 managed-node2 podman[38164]: 2024-12-28 11:34:00.968366288 -0500 EST m=+0.243405744 container init 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:34:00 managed-node2 systemd[1]: Started quadlet-demo-mysql.service.
░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has finished successfully.
░░
░░ The job identifier is 3022.
Dec 28 11:34:01 managed-node2 podman[38164]: 2024-12-28 11:34:01.025873976 -0500 EST m=+0.300913418 container start 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:34:01 managed-node2 quadlet-demo-mysql[38164]: 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca
Dec 28 11:34:01 managed-node2 podman[38228]: 2024-12-28 11:34:01.181978178 -0500 EST m=+0.138110250 container health_status 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:34:01 managed-node2 python3.12[38418]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:34:03 managed-node2 python3.12[38562]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:03 managed-node2 python3.12[38693]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 28 11:34:03 managed-node2 python3.12[38798]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403643.3933573-19976-127622001330519/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:04 managed-node2 python3.12[38953]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:34:04 managed-node2 systemd[1]: Reload requested from client PID 38954 ('systemctl') (unit session-5.scope)...
Dec 28 11:34:04 managed-node2 systemd[1]: Reloading...
Dec 28 11:34:04 managed-node2 systemd[1]: Reloading finished in 239 ms.
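The quadlet-demo-mysql.container file copied earlier ties the pieces together. The journal confirms what its generated quadlet-demo-mysql.service does: create a container named quadlet-demo-mysql from quay.io/linux-system-roles/mysql:5.6, attach it to the podman2 bridge of the quadlet network, and drive a periodic podman healthcheck through a transient systemd timer. A sketch of a .container unit consistent with those events; the volume mount path and the health command are assumptions, since the file itself is not logged:

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Network=quadlet-demo.network
    # Mount point assumed; only the volume's existence is visible in the log.
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    # Some HealthCmd= must be present to produce the healthcheck timer and the
    # recurring "container health_status ... health_status=healthy" events,
    # but the actual command is not visible here.

Referencing quadlet-demo.network and quadlet-demo-mysql.volume by unit name also makes quadlet add the matching Requires=/After= ordering to the generated service automatically, which is why the network and volume services start first.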
Dec 28 11:34:05 managed-node2 python3.12[39141]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:34:07 managed-node2 python3.12[39302]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:07 managed-node2 python3.12[39442]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 28 11:34:08 managed-node2 python3.12[39547]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1735403647.2769017-20133-246311741527282/.source.yml _original_basename=.ssqbkyw_ follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:08 managed-node2 python3.12[39678]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:34:08 managed-node2 systemd[1]: Reload requested from client PID 39686 ('systemctl') (unit session-5.scope)...
Dec 28 11:34:08 managed-node2 systemd[1]: Reloading...
Dec 28 11:34:09 managed-node2 systemd[1]: Reloading finished in 229 ms.
Dec 28 11:34:10 managed-node2 python3.12[39873]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:34:10 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 28 11:34:10 managed-node2 python3.12[40006]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Dec 28 11:34:11 managed-node2 python3.12[40161]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:12 managed-node2 python3.12[40292]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:23 managed-node2 podman[40432]: 2024-12-28 11:34:23.845732743 -0500 EST m=+11.306967134 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Dec 28 11:34:29 managed-node2 podman[40852]: 2024-12-28 11:34:29.041429179 -0500 EST m=+4.728776640 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Dec 28 11:34:29 managed-node2 python3.12[41119]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:29 managed-node2 python3.12[41250]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Dec 28 11:34:30 managed-node2 python3.12[41355]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1735403669.5789778-20856-168524673727792/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:34:30 managed-node2 python3.12[41486]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:34:30 managed-node2 systemd[1]: Reload requested from client PID 41487 ('systemctl') (unit session-5.scope)...
Dec 28 11:34:30 managed-node2 systemd[1]: Reloading...
Dec 28 11:34:31 managed-node2 systemd[1]: Reloading finished in 233 ms.
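The last unit, quadlet-demo.kube, points podman at the Kubernetes YAML installed just before it; when quadlet-demo.service starts below, the journal shows the corresponding kube-play output (the wp-pv-claim volume, a pod with an infra container, and the wordpress and envoy containers). A sketch of a .kube unit consistent with the log; the ConfigMap= reference and the port mappings are assumptions:

    [Kube]
    Yaml=quadlet-demo.yml
    # The envoy-proxy-configmap.yml installed earlier is presumably wired in
    # via ConfigMap=envoy-proxy-configmap.yml, though that is not visible here.
    # Joining the quadlet network is implied by veth3 attaching to podman2 below.
    Network=quadlet-demo.network
    # The firewall opened 8000/tcp and 9000/tcp and the test later fetches
    # https://localhost:8000, implying PublishPort= mappings such as
    # PublishPort=8000:8080 (assumed; the real mappings are not logged).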
Dec 28 11:34:31 managed-node2 podman[41652]: 2024-12-28 11:34:31.417261378 -0500 EST m=+0.139690391 container health_status 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 28 11:34:31 managed-node2 python3.12[41681]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Dec 28 11:34:31 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3422. Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Pods stopped: Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Pods removed: Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Secrets removed: Dec 28 11:34:31 managed-node2 quadlet-demo[41693]: Volumes removed: Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.677835021 -0500 EST m=+0.030587276 volume create wp-pv-claim Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.769340308 -0500 EST m=+0.122092581 container create 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.775796735 -0500 EST m=+0.128549016 volume create envoy-proxy-config Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.781146214 -0500 EST m=+0.133898462 volume create envoy-certificates Dec 28 11:34:31 managed-node2 systemd[1]: Created slice machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice - cgroup machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice. ░░ Subject: A start job for unit machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice has finished successfully. ░░ ░░ The job identifier is 3509. 
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.823653704 -0500 EST m=+0.176405950 container create 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.828087967 -0500 EST m=+0.180840227 pod create 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo) Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.862683945 -0500 EST m=+0.215436266 container create dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.8355441 -0500 EST m=+0.188296491 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.891170667 -0500 EST m=+0.243922983 container create e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.89164498 -0500 EST m=+0.244397237 container restart 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:31 managed-node2 systemd[23102]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 118. Dec 28 11:34:31 managed-node2 systemd[1]: Started libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope has finished successfully. ░░ ░░ The job identifier is 3515. Dec 28 11:34:31 managed-node2 systemd[23102]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 118. 
Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.938438085 -0500 EST m=+0.291190514 container init 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.941536497 -0500 EST m=+0.294288848 container start 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:31 managed-node2 podman[41693]: 2024-12-28 11:34:31.870177714 -0500 EST m=+0.222930103 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Dec 28 11:34:31 managed-node2 kernel: veth3: entered allmulticast mode Dec 28 11:34:31 managed-node2 kernel: veth3: entered promiscuous mode Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Dec 28 11:34:31 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state Dec 28 11:34:31 managed-node2 NetworkManager[780]: [1735403671.9773] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11) Dec 28 11:34:31 managed-node2 NetworkManager[780]: [1735403671.9839] device (veth3): carrier: link connected Dec 28 11:34:32 managed-node2 (udev-worker)[41715]: Network interface NamePolicy= disabled on kernel command line. Dec 28 11:34:32 managed-node2 systemd[1]: Started libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope - libcrun container. ░░ Subject: A start job for unit libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope has finished successfully. ░░ ░░ The job identifier is 3522. Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.061572321 -0500 EST m=+0.414324662 container init 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.064370179 -0500 EST m=+0.417122543 container start 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:32 managed-node2 systemd[1]: Started libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope - libcrun container. ░░ Subject: A start job for unit libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope has finished successfully. ░░ ░░ The job identifier is 3529. 
Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.20135626 -0500 EST m=+0.554108595 container init dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.204788464 -0500 EST m=+0.557540798 container start dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:32 managed-node2 systemd[1]: Started libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope - libcrun container. ░░ Subject: A start job for unit libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope has finished successfully. ░░ ░░ The job identifier is 3536. Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.274240424 -0500 EST m=+0.626992728 container init e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.277076826 -0500 EST m=+0.629829199 container start e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:34:32 managed-node2 podman[41693]: 2024-12-28 11:34:32.282279783 -0500 EST m=+0.635032096 pod start 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo) Dec 28 11:34:32 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 3422. 
Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: Volumes: Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: wp-pv-claim Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: Pod: Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: Containers: Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 Dec 28 11:34:32 managed-node2 quadlet-demo[41693]: e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 Dec 28 11:34:33 managed-node2 python3.12[41975]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:34:33 managed-node2 python3.12[42180]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:34:34 managed-node2 python3.12[42354]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:34:34 managed-node2 python3.12[42493]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:34:34 managed-node2 python3.12[42632]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:34:35 managed-node2 python3.12[42766]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:41 managed-node2 python3.12[42897]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:46 managed-node2 python3.12[43028]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True 
use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:52 managed-node2 python3.12[43159]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:34:57 managed-node2 python3.12[43290]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:35:02 managed-node2 podman[43312]: 2024-12-28 11:35:02.210587379 -0500 EST m=+0.092731080 container health_status 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Dec 28 11:35:03 managed-node2 python3.12[43438]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:35:08 managed-node2 python3.12[43569]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Dec 28 11:35:08 managed-node2 python3.12[43700]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:35:09 managed-node2 python3.12[43832]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:35:09 managed-node2 python3.12[43970]: ansible-ansible.legacy.command Invoked with 
_raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:35:10 managed-node2 python3.12[44109]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:35:10 managed-node2 python3.12[44243]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:35:12 managed-node2 python3.12[44506]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Dec 28 11:35:13 managed-node2 python3.12[44643]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:35:15 managed-node2 python3.12[44776]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Dec 28 11:35:16 managed-node2 python3.12[44908]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Dec 28 11:35:17 managed-node2 python3.12[45041]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Dec 28 11:35:17 managed-node2 python3.12[45174]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 28 11:35:18 managed-node2 python3.12[45305]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Dec 28 11:35:22 managed-node2 python3.12[45851]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Dec 28 11:35:24 managed-node2 python3.12[45984]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Dec 28 11:35:24 managed-node2 systemd[1]: Reload requested from client PID 45987 ('systemctl') (unit session-5.scope)... Dec 28 11:35:24 managed-node2 systemd[1]: Reloading... Dec 28 11:35:24 managed-node2 systemd[1]: Reloading finished in 241 ms. Dec 28 11:35:24 managed-node2 systemd[1]: Stopping quadlet-demo.service... ░░ Subject: A stop job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3621. Dec 28 11:35:24 managed-node2 systemd[1]: libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07.scope has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 podman[46045]: 2024-12-28 11:35:24.415704543 -0500 EST m=+0.022108275 container died 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07-userdata-shm.mount has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 podman[46045]: 2024-12-28 11:35:24.452313997 -0500 EST m=+0.058717808 container cleanup 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 systemd[1]: var-lib-containers-storage-overlay-c872f51d28be9f251ae2b7aa5e7acfe842b7d57755516bfb50372464290bf299-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-c872f51d28be9f251ae2b7aa5e7acfe842b7d57755516bfb50372464290bf299-merged.mount has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.50074596 -0500 EST m=+0.024335061 pod stop 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo) Dec 28 11:35:24 managed-node2 systemd[1]: libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d.scope has successfully entered the 'dead' state. 
Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.540795309 -0500 EST m=+0.064384462 container died 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 28 11:35:24 managed-node2 systemd[1]: libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 conmon[41753]: conmon e1dba157116cafdf7b75 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice/libpod-e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7.scope/container/memory.events Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.553776863 -0500 EST m=+0.077365944 container died e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Dec 28 11:35:24 managed-node2 kernel: veth3 (unregistering): left allmulticast mode Dec 28 11:35:24 managed-node2 kernel: veth3 (unregistering): left promiscuous mode Dec 28 11:35:24 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Dec 28 11:35:24 managed-node2 systemd[1]: var-lib-containers-storage-overlay-5604b49470b34774befc648ad2b8bc58656444dc0a400f324059cfb8e5167f7a-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-5604b49470b34774befc648ad2b8bc58656444dc0a400f324059cfb8e5167f7a-merged.mount has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.612430056 -0500 EST m=+0.136019099 container cleanup e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 systemd[1]: libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 conmon[41744]: conmon dcf01e22c906c7af4979 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice/libpod-dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09.scope/container/memory.events Dec 28 11:35:24 managed-node2 systemd[1]: run-netns-netns\x2d2057e5b2\x2d9497\x2d7fb8\x2dd44d\x2d91b30fc99c73.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d2057e5b2\x2d9497\x2d7fb8\x2dd44d\x2d91b30fc99c73.mount has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.633994329 -0500 EST m=+0.157583697 container died dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d-userdata-shm.mount has successfully entered the 'dead' state. Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.678251949 -0500 EST m=+0.201841021 container cleanup 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.697527047 -0500 EST m=+0.221116459 container cleanup dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 systemd[1]: Removed slice machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice - cgroup machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice. ░░ Subject: A stop job for unit machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice has finished. ░░ ░░ The job identifier is 3623 and the job result is done. 
Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.705016999 -0500 EST m=+0.228606087 pod stop 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo) Dec 28 11:35:24 managed-node2 systemd[1]: machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice: Failed to open /run/systemd/transient/machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice: No such file or directory Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.713673834 -0500 EST m=+0.237262852 pod stop 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo) Dec 28 11:35:24 managed-node2 systemd[1]: machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice: Failed to open /run/systemd/transient/machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice: No such file or directory Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.734003393 -0500 EST m=+0.257592384 container remove dcf01e22c906c7af497984418e9893491d95c19bded1bbe9cc679fe1b1935e09 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.751890053 -0500 EST m=+0.275479051 container remove e1dba157116cafdf7b750001b22bcc9076eff0c11fabcdcfa27d5f61bb6f62c7 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.771859503 -0500 EST m=+0.295448495 container remove 6e505f6ec50bb00a7d4244a37f43e7ab8bab5a3562ba6fba14835475e734ee2d (image=localhost/podman-pause:5.3.1-1733097600, name=55132399920f-infra, pod_id=55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Dec 28 11:35:24 managed-node2 systemd[1]: machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice: Failed to open /run/systemd/transient/machine-libpod_pod_55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516.slice: No such file or directory Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.779809907 -0500 EST m=+0.303398891 pod remove 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 (image=, name=quadlet-demo) Dec 28 11:35:24 managed-node2 podman[46054]: 2024-12-28 11:35:24.801376369 -0500 EST m=+0.324965464 container remove 421487fe43893496f8f6d08b7e9e4d0d8c89fa06866bc863189e3be24cce4b07 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Dec 28 11:35:24 managed-node2 quadlet-demo[46054]: Pods stopped: Dec 28 11:35:24 managed-node2 quadlet-demo[46054]: 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 Dec 28 11:35:24 managed-node2 quadlet-demo[46054]: Pods removed: Dec 28 11:35:24 managed-node2 quadlet-demo[46054]: 55132399920f59fbd37b99bb2d5733eb2002dc195800a2130dc93f3459289516 Dec 28 11:35:24 managed-node2 quadlet-demo[46054]: Secrets removed: Dec 28 11:35:24 managed-node2 quadlet-demo[46054]: Volumes removed: Dec 28 11:35:24 managed-node2 systemd[1]: quadlet-demo.service: Deactivated successfully. 
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo.service has successfully entered the 'dead' state.
Dec 28 11:35:24 managed-node2 systemd[1]: Stopped quadlet-demo.service.
░░ Subject: A stop job for unit quadlet-demo.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo.service has finished.
░░
░░ The job identifier is 3621 and the job result is done.
Dec 28 11:35:25 managed-node2 python3.12[46228]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:35:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9db6ba55aec8b933ff59d62a9a5e1ee62974eb2d901842f73549f4f9f8803dcc-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-9db6ba55aec8b933ff59d62a9a5e1ee62974eb2d901842f73549f4f9f8803dcc-merged.mount has successfully entered the 'dead' state.
Dec 28 11:35:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay-23d755fc1f0321618253dcb7e4f4be840b2a4d50af33d3407afe75794e45f50a-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-23d755fc1f0321618253dcb7e4f4be840b2a4d50af33d3407afe75794e45f50a-merged.mount has successfully entered the 'dead' state.
Dec 28 11:35:26 managed-node2 python3.12[46492]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:35:26 managed-node2 python3.12[46623]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:35:26 managed-node2 systemd[1]: Reload requested from client PID 46624 ('systemctl') (unit session-5.scope)...
Dec 28 11:35:26 managed-node2 systemd[1]: Reloading...
Dec 28 11:35:27 managed-node2 systemd[1]: Reloading finished in 233 ms.
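The three module invocations just above (ansible-stat, ansible-file with state=absent, ansible-systemd with daemon_reload=True) are the role's standard teardown for a quadlet source file: check for the file, remove it, then reload systemd so the generated unit disappears. A minimal sketch of equivalent tasks, reconstructed from the logged arguments — task names and the register variable are illustrative, not the role's actual source:

- name: Check whether the quadlet file exists
  ansible.builtin.stat:
    path: /etc/containers/systemd/quadlet-demo.kube
  register: __quadlet_kube  # hypothetical variable name

- name: Remove the quadlet file
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo.kube
    state: absent

- name: Reload systemd so the generated quadlet-demo.service is dropped
  ansible.builtin.systemd:
    daemon_reload: true
    scope: system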
Dec 28 11:35:27 managed-node2 python3.12[46810]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:28 managed-node2 podman[46811]: 2024-12-28 11:35:28.289391684 -0500 EST m=+0.555248901 image untag fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Dec 28 11:35:28 managed-node2 podman[46811]: 2024-12-28 11:35:27.752055173 -0500 EST m=+0.017912331 image remove fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
Dec 28 11:35:28 managed-node2 podman[46811]: 2024-12-28 11:35:28.424012452 -0500 EST m=+0.689869716 image untag 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Dec 28 11:35:28 managed-node2 podman[46811]: 2024-12-28 11:35:28.289404341 -0500 EST m=+0.555261466 image remove 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d
Dec 28 11:35:28 managed-node2 python3.12[46949]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:29 managed-node2 python3.12[47087]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:29 managed-node2 python3.12[47226]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:30 managed-node2 python3.12[47364]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:31 managed-node2 python3.12[47779]: ansible-service_facts Invoked
Dec 28 11:35:32 managed-node2 podman[47787]: 2024-12-28 11:35:32.342394206 -0500 EST m=+0.073220892 container health_status 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=1, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:35:32 managed-node2 systemd[1]: 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service: Main process exited, code=exited, status=125/n/a
░░ Subject: Unit process exited
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ An ExecStart= process belonging to unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service has exited.
░░
░░ The process' exit code is 'exited' and its exit status is 125.
Dec 28 11:35:32 managed-node2 systemd[1]: 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service: Failed with result 'exit-code'.
░░ Subject: Unit failed
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.service has entered the 'failed' state with result 'exit-code'.
Dec 28 11:35:34 managed-node2 python3.12[48027]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:35:36 managed-node2 python3.12[48160]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:35:37 managed-node2 python3.12[48424]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:35:38 managed-node2 python3.12[48555]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:35:38 managed-node2 systemd[1]: Reload requested from client PID 48556 ('systemctl') (unit session-5.scope)...
Dec 28 11:35:38 managed-node2 systemd[1]: Reloading...
Dec 28 11:35:38 managed-node2 systemd[1]: Reloading finished in 232 ms.
Dec 28 11:35:39 managed-node2 podman[48743]: 2024-12-28 11:35:39.190560828 -0500 EST m=+0.023776132 volume remove envoy-proxy-config
Dec 28 11:35:39 managed-node2 podman[48882]: 2024-12-28 11:35:39.591973586 -0500 EST m=+0.021600230 volume remove envoy-certificates
Dec 28 11:35:40 managed-node2 podman[49021]: 2024-12-28 11:35:40.027386076 -0500 EST m=+0.055564099 volume remove wp-pv-claim
Dec 28 11:35:40 managed-node2 python3.12[49159]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:41 managed-node2 python3.12[49297]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:41 managed-node2 python3.12[49435]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:42 managed-node2 python3.12[49574]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:42 managed-node2 python3.12[49712]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:44 managed-node2 python3.12[50128]: ansible-service_facts Invoked
Dec 28 11:35:46 managed-node2 python3.12[50366]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:35:48 managed-node2 python3.12[50499]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:35:49 managed-node2 python3.12[50763]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:35:50 managed-node2 python3.12[50894]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:35:50 managed-node2 systemd[1]: Reload requested from client PID 50895 ('systemctl') (unit session-5.scope)...
Dec 28 11:35:50 managed-node2 systemd[1]: Reloading...
Dec 28 11:35:50 managed-node2 systemd[1]: Reloading finished in 225 ms.
Dec 28 11:35:51 managed-node2 python3.12[51082]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:51 managed-node2 python3.12[51221]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:52 managed-node2 python3.12[51359]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:52 managed-node2 python3.12[51497]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:53 managed-node2 python3.12[51635]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:35:54 managed-node2 python3.12[52049]: ansible-service_facts Invoked
Dec 28 11:35:57 managed-node2 python3.12[52287]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:35:59 managed-node2 python3.12[52420]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Dec 28 11:35:59 managed-node2 systemd[1]: Reload requested from client PID 52423 ('systemctl') (unit session-5.scope)...
Dec 28 11:35:59 managed-node2 systemd[1]: Reloading...
Dec 28 11:35:59 managed-node2 systemd[1]: Reloading finished in 224 ms.
Dec 28 11:35:59 managed-node2 systemd[1]: Stopping quadlet-demo-mysql.service...
░░ Subject: A stop job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 3702.
Dec 28 11:36:01 managed-node2 podman[52481]: 2024-12-28 11:36:01.082741783 -0500 EST m=+1.770938969 container died 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:36:01 managed-node2 systemd[1]: 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer has successfully entered the 'dead' state.
Dec 28 11:36:01 managed-node2 systemd[1]: Stopped 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer - /usr/bin/podman healthcheck run 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca.
░░ Subject: A stop job for unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-73ebe49c63f1a996.timer has finished.
░░
░░ The job identifier is 3703 and the job result is done.
Dec 28 11:36:01 managed-node2 kernel: podman2: port 1(veth2) entered disabled state
Dec 28 11:36:01 managed-node2 kernel: veth2 (unregistering): left allmulticast mode
Dec 28 11:36:01 managed-node2 kernel: veth2 (unregistering): left promiscuous mode
Dec 28 11:36:01 managed-node2 kernel: podman2: port 1(veth2) entered disabled state
Dec 28 11:36:01 managed-node2 NetworkManager[780]: [1735403761.1376] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Dec 28 11:36:01 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 3706.
Dec 28 11:36:01 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 3706.
Dec 28 11:36:01 managed-node2 systemd[1]: run-netns-netns\x2db96e91b4\x2db55c\x2d83e8\x2db0a1\x2d4cd95027b272.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2db96e91b4\x2db55c\x2d83e8\x2db0a1\x2d4cd95027b272.mount has successfully entered the 'dead' state.
Dec 28 11:36:01 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca-userdata-shm.mount has successfully entered the 'dead' state.
Dec 28 11:36:01 managed-node2 systemd[1]: var-lib-containers-storage-overlay-82207b1f4b19c8f3c513fbca694c9b29864a19d0529454b41a650c1df1ea0512-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-82207b1f4b19c8f3c513fbca694c9b29864a19d0529454b41a650c1df1ea0512-merged.mount has successfully entered the 'dead' state.
Dec 28 11:36:01 managed-node2 podman[52481]: 2024-12-28 11:36:01.250806214 -0500 EST m=+1.939003210 container remove 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Dec 28 11:36:01 managed-node2 quadlet-demo-mysql[52481]: 38908400944e8e44293286323dbb2e44769f7c0dc5be58ee2f79d7107a3f66ca
Dec 28 11:36:01 managed-node2 systemd[1]: quadlet-demo-mysql.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-mysql.service has successfully entered the 'dead' state.
Dec 28 11:36:01 managed-node2 systemd[1]: Stopped quadlet-demo-mysql.service.
░░ Subject: A stop job for unit quadlet-demo-mysql.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-mysql.service has finished.
░░
░░ The job identifier is 3702 and the job result is done.
Dec 28 11:36:01 managed-node2 python3.12[52662]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:36:02 managed-node2 python3.12[52926]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:36:03 managed-node2 python3.12[53057]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:36:03 managed-node2 systemd[1]: Reload requested from client PID 53058 ('systemctl') (unit session-5.scope)...
Dec 28 11:36:03 managed-node2 systemd[1]: Reloading...
Dec 28 11:36:03 managed-node2 systemd[1]: Reloading finished in 226 ms.
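The container 'died' and 'remove' events above were triggered by the ansible-systemd invocation logged earlier (name=quadlet-demo-mysql.service state=stopped enabled=False force=True). Expressed as a task, that invocation corresponds roughly to the sketch below; the task name is illustrative, not taken from the role's source:

- name: Stop and disable the quadlet-generated MySQL service
  ansible.builtin.systemd:
    name: quadlet-demo-mysql.service
    scope: system
    state: stopped
    enabled: false
    force: true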
Dec 28 11:36:04 managed-node2 python3.12[53383]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:05 managed-node2 podman[53384]: 2024-12-28 11:36:05.008774186 -0500 EST m=+0.223258584 image untag dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Dec 28 11:36:05 managed-node2 podman[53384]: 2024-12-28 11:36:04.801991896 -0500 EST m=+0.016476331 image remove dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
Dec 28 11:36:05 managed-node2 python3.12[53522]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:06 managed-node2 python3.12[53661]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:06 managed-node2 python3.12[53800]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:06 managed-node2 python3.12[53938]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:08 managed-node2 python3.12[54354]: ansible-service_facts Invoked
Dec 28 11:36:10 managed-node2 python3.12[54591]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:36:11 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Dec 28 11:36:11 managed-node2 python3.12[54725]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Dec 28 11:36:12 managed-node2 systemd[1]: Reload requested from client PID 54728 ('systemctl') (unit session-5.scope)...
Dec 28 11:36:12 managed-node2 systemd[1]: Reloading...
Dec 28 11:36:12 managed-node2 systemd[1]: Reloading finished in 217 ms.
Dec 28 11:36:12 managed-node2 systemd[1]: quadlet-demo-mysql-volume.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-mysql-volume.service has successfully entered the 'dead' state.
Dec 28 11:36:12 managed-node2 systemd[1]: Stopped quadlet-demo-mysql-volume.service.
░░ Subject: A stop job for unit quadlet-demo-mysql-volume.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-mysql-volume.service has finished.
░░
░░ The job identifier is 3785 and the job result is done.
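After each unit is torn down, the test prunes unused images and confirms that no images, volumes, containers, or networks remain. The five podman commands recur verbatim throughout this log; expressed as tasks they would look roughly like the following — the loop structure and register name are assumptions, since the log only shows the individual command invocations:

- name: Prune all unused images
  ansible.builtin.command: podman image prune --all -f

- name: List leftover podman objects for verification
  ansible.builtin.command: "{{ item }}"
  loop:
    - podman images -n
    - podman volume ls -n
    - podman ps --noheading
    - podman network ls -n -q
  register: __podman_leftovers  # hypothetical variable name
  changed_when: false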
Dec 28 11:36:12 managed-node2 python3.12[54916]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:36:13 managed-node2 python3.12[55180]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:36:14 managed-node2 python3.12[55311]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:36:14 managed-node2 systemd[1]: Reload requested from client PID 55312 ('systemctl') (unit session-5.scope)...
Dec 28 11:36:14 managed-node2 systemd[1]: Reloading...
Dec 28 11:36:14 managed-node2 systemd[1]: Reloading finished in 211 ms.
Dec 28 11:36:14 managed-node2 podman[55499]: 2024-12-28 11:36:14.925718935 -0500 EST m=+0.025591037 volume remove systemd-quadlet-demo-mysql
Dec 28 11:36:15 managed-node2 python3.12[55636]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:15 managed-node2 python3.12[55774]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:16 managed-node2 python3.12[55913]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:16 managed-node2 python3.12[56051]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:17 managed-node2 python3.12[56190]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:18 managed-node2 python3.12[56605]: ansible-service_facts Invoked
Dec 28 11:36:21 managed-node2 python3.12[56843]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:36:22 managed-node2 python3.12[56976]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Dec 28 11:36:22 managed-node2 systemd[1]: Reload requested from client PID 56979 ('systemctl') (unit session-5.scope)...
Dec 28 11:36:22 managed-node2 systemd[1]: Reloading...
Dec 28 11:36:22 managed-node2 systemd[1]: Reloading finished in 210 ms.
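The same stat/remove/daemon-reload cycle repeats once per quadlet artifact: the .kube file, the two Kubernetes YAML specs, the .container file, the .volume file, and finally the .network file. Condensed into a single hypothetical loop over the paths seen in this log (the role actually processes each spec in its own pass, as the journal entries show):

- name: Remove all quadlet source files for the demo app
  ansible.builtin.file:
    path: "/etc/containers/systemd/{{ item }}"
    state: absent
  loop:
    - quadlet-demo.kube
    - quadlet-demo.yml
    - envoy-proxy-configmap.yml
    - quadlet-demo-mysql.container
    - quadlet-demo-mysql.volume
    - quadlet-demo.network

- name: Reload systemd once the files are gone
  ansible.builtin.systemd:
    daemon_reload: true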
Dec 28 11:36:22 managed-node2 systemd[1]: quadlet-demo-network.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-network.service has successfully entered the 'dead' state.
Dec 28 11:36:22 managed-node2 systemd[1]: Stopped quadlet-demo-network.service.
░░ Subject: A stop job for unit quadlet-demo-network.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-network.service has finished.
░░
░░ The job identifier is 3786 and the job result is done.
Dec 28 11:36:23 managed-node2 python3.12[57167]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 28 11:36:24 managed-node2 python3.12[57431]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 28 11:36:24 managed-node2 python3.12[57562]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 28 11:36:24 managed-node2 systemd[1]: Reload requested from client PID 57563 ('systemctl') (unit session-5.scope)...
Dec 28 11:36:24 managed-node2 systemd[1]: Reloading...
Dec 28 11:36:24 managed-node2 systemd[1]: Reloading finished in 219 ms.
Dec 28 11:36:25 managed-node2 python3.12[57887]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:26 managed-node2 python3.12[58025]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:26 managed-node2 python3.12[58163]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:27 managed-node2 python3.12[58303]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:27 managed-node2 python3.12[58441]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:29 managed-node2 python3.12[58856]: ansible-service_facts Invoked
Dec 28 11:36:31 managed-node2 python3.12[59094]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units --plain --failed -l --all | grep quadlet || : _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 28 11:36:32 managed-node2 python3.12[59232]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node2 : ok=397 changed=47 unreachable=0 failed=2 skipped=376 rescued=2 ignored=0

TASKS RECAP ********************************************************************
Saturday 28 December 2024 11:36:32 -0500 (0:00:00.453) 0:03:23.018 *****
===============================================================================
Check web -------------------------------------------------------------- 33.47s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.podman : Ensure container images are present -- 16.99s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 7.19s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 3.18s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.87s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.34s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.30s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.20s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.11s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.06s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.05s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.94s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Gathering Facts --------------------------------------------------------- 1.47s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.38s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
fedora.linux_system_roles.podman : Start service ------------------------ 1.32s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.31s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.31s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.25s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.certificate : Remove files -------------------- 1.23s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.19s
/tmp/collections-Zud/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6