ansible-playbook [core 2.17.8]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
ansible collection location = /tmp/collections-yxu
executable location = /usr/local/bin/ansible-playbook
python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
jinja version = 3.1.5
libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml
PLAY [all] *********************************************************************
TASK [Include vault variables] *************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Saturday 08 February 2025 11:38:25 -0500 (0:00:00.014) 0:00:00.014 *****
[WARNING]: Found variable using reserved name: q
ok: [managed-node1] => {
"ansible_facts": {
"__podman_test_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
},
"mysql_container_root_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
}
},
"ansible_included_var_files": [
"/tmp/podman-1gj/tests/vars/vault-variables.yml"
],
"changed": false
}
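For context: the task above loads vault-encrypted values with include_vars; the "$ANSIBLE_VAULT;1.1;AES256" blobs stay encrypted on disk and are only decrypted in memory with the vault password. A minimal sketch of how such a vars file is produced and loaded (the variable name and file name follow the log; the encrypt_string invocation and path layout are assumptions):

    # Encrypt a value and paste the output into vars/vault-variables.yml:
    #   ansible-vault encrypt_string 'SomePassword' --name '__podman_test_password'
    - name: Include vault variables
      ansible.builtin.include_vars:
        file: "{{ playbook_dir }}/vars/vault-variables.yml"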
PLAY [Deploy the quadlet demo app] *********************************************
TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Saturday 08 February 2025 11:38:25 -0500 (0:00:00.049) 0:00:00.067 *****
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node1]
TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38
Saturday 08 February 2025 11:38:26 -0500 (0:00:01.407) 0:00:01.474 *****
skipping: [managed-node1] => {
"false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""
}
TASK [End test] ****************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45
Saturday 08 February 2025 11:38:26 -0500 (0:00:00.015) 0:00:01.490 *****
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node1] => {
"skip_reason": "end_play conditional evaluated to False, continuing play"
}
MSG:
end_play
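For context: "end_play conditional evaluated to False, continuing play" means the meta task carried a when: clause that did not match on this x86_64 host. Reconstructed from the logged conditions (a sketch, not the verbatim test file), the guard pair looks roughly like:

    - name: Test is only supported on x86_64
      ansible.builtin.debug:
        msg: This test is only supported on x86_64
      when: ansible_facts["architecture"] != "x86_64"

    - name: End test
      ansible.builtin.meta: end_play
      when: ansible_facts["architecture"] != "x86_64"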
TASK [Generate certificates] ***************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51
Saturday 08 February 2025 11:38:26 -0500 (0:00:00.009) 0:00:01.499 *****
included: fedora.linux_system_roles.certificate for managed-node1
TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Saturday 08 February 2025 11:38:26 -0500 (0:00:00.043) 0:00:01.543 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Saturday 08 February 2025 11:38:26 -0500 (0:00:00.032) 0:00:01.576 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Saturday 08 February 2025 11:38:26 -0500 (0:00:00.050) 0:00:01.626 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Saturday 08 February 2025 11:38:27 -0500 (0:00:00.521) 0:00:02.147 *****
ok: [managed-node1] => {
"ansible_facts": {
"__certificate_is_ostree": false
},
"changed": false
}
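For context: the ostree probe is a stat of a marker file plus a set_fact that records the result for later conditionals; the same pattern repeats below in the podman role. A minimal sketch (the /run/ostree-booted path and the register name are assumptions; the fact name matches the log):

    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted
      register: __ostree_booted_stat

    - name: Set flag to indicate system is ostree
      ansible.builtin.set_fact:
        __certificate_is_ostree: "{{ __ostree_booted_stat.stat.exists }}"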
TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19
Saturday 08 February 2025 11:38:27 -0500 (0:00:00.023) 0:00:02.171 *****
skipping: [managed-node1] => (item=RedHat.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "RedHat.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
"ansible_facts": {
"__certificate_certmonger_packages": [
"certmonger",
"python3-packaging"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
"ansible_facts": {
"__certificate_certmonger_packages": [
"certmonger",
"python3-packaging"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_10.yml"
}
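For context: this loop is the collection's "most specific platform vars file wins" pattern, and it also explains the doubled CentOS_10.yml result: on CentOS Stream the distribution_version is plain "10", identical to the major version, so the last two candidate file names coincide and the same file is loaded twice. A sketch of the pattern (candidate list reconstructed from the logged items; variable names other than __vars_file, which appears in the skip conditions, are assumptions):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars:
        file: "{{ __vars_file }}"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"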
TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Saturday 08 February 2025 11:38:27 -0500 (0:00:00.041) 0:00:02.213 *****
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: python3-cryptography python3-dbus python3-pyasn1
TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
Saturday 08 February 2025 11:38:28 -0500 (0:00:00.944) 0:00:03.157 *****
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: certmonger python3-packaging
TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35
Saturday 08 February 2025 11:38:28 -0500 (0:00:00.851) 0:00:04.009 *****
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//pre-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61
Saturday 08 February 2025 11:38:29 -0500 (0:00:00.515) 0:00:04.524 *****
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//post-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
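For context: both hook directories come from plain file-module tasks; the doubled slash in the logged paths (/etc/certmonger//pre-scripts) just reflects a config-directory variable that already ends in a slash and is harmless to the module. A minimal equivalent, with ownership and mode taken from the logged results:

    - name: Ensure pre-scripts hooks directory exists
      ansible.builtin.file:
        path: /etc/certmonger/pre-scripts
        state: directory
        owner: root
        group: root
        mode: "0700"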
TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
Saturday 08 February 2025 11:38:29 -0500 (0:00:00.408) 0:00:04.932 *****
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"enabled": true,
"name": "certmonger",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0",
"ActiveEnterTimestamp": "Sat 2025-02-08 11:34:22 EST",
"ActiveEnterTimestampMonotonic": "395540950",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "network.target basic.target syslog.target systemd-journald.socket dbus-broker.service sysinit.target system.slice dbus.socket",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2025-02-08 11:34:22 EST",
"AssertTimestampMonotonic": "395517334",
"Before": "multi-user.target shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedorahosted.certmonger",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "452623000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2025-02-08 11:34:22 EST",
"ConditionTimestampMonotonic": "395517330",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/certmonger.service",
"ControlGroupId": "5130",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "Certificate monitoring and PKI enrollment",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestamp": "Sat 2025-02-08 11:34:22 EST",
"ExecMainHandoffTimestampMonotonic": "395530520",
"ExecMainPID": "10262",
"ExecMainStartTimestamp": "Sat 2025-02-08 11:34:22 EST",
"ExecMainStartTimestampMonotonic": "395518380",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/certmonger.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "certmonger.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2025-02-08 11:34:22 EST",
"InactiveExitTimestampMonotonic": "395518931",
"InvocationID": "d174bdc16bde462381b228e38c453f06",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "10262",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3176505344",
"MemoryCurrent": "1990656",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "11235328",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "certmonger.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"PIDFile": "/run/certmonger.pid",
"PartOf": "dbus-broker.service",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target dbus.socket system.slice",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Sat 2025-02-08 11:36:29 EST",
"StateChangeTimestampMonotonic": "522430745",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "1",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
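For context: the large status dictionary above is the unit property dump the systemd module returns when it verifies the service; the task itself reduces to a few lines, with name, state, and enabled taken from the logged result:

    - name: Ensure provider service is running
      ansible.builtin.systemd:
        name: certmonger
        state: started
        enabled: true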
TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101
Saturday 08 February 2025 11:38:30 -0500 (0:00:00.981) 0:00:05.914 *****
changed: [managed-node1] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
}
MSG:
Certificate requested (new).
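For context: the loop item above is one entry of the certificate role's certificate_requests input, which certmonger turns into a tracked, self-signed certificate for localhost. Reconstructed from the logged item, the calling play passes roughly:

    - name: Generate certificates
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.certificate
      vars:
        certificate_requests:
          - name: quadlet_demo
            dns:
              - localhost
            ca: self-sign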
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
Saturday 08 February 2025 11:38:31 -0500 (0:00:00.876) 0:00:06.791 *****
ok: [managed-node1] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQVBNSXZsQnhhRUpqdjRRUTEzdGcwd2N3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJZegpNRGhpWlRVd0xUY3hOamcwTWpZekxXSm1PRFF4TUdRM0xUZGlOakJrTXpBMU1CNFhEVEkxTURJd09ERTJNemd6Ck1Wb1hEVEkyTURJd09ERTJNelF5TWxvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF2bytNazlpTlg5ZU15akVrSjEyUmx3b2pJVnJEeFArTQprZTJOMlJPc1d2Y1RUbC9jN3pybjdnQXVialgwQjNUaXhBa1ZwVkVtcDFVVjNtMVlJeC9GRitkaldtbUZJL3VECjZ4UGVjVFB5VkY3cWFNV0wyTTV4cnByNmJTY0F0L3FCUFZpOVVFUFlUeXpnYm5NOXJ1YXNIZm8rU3hvV2ZFVHUKSkNIR2NjaWdwV3I2dTZWL1J2REIxNTN2WHE3dXlVdGhSSzdjdnorRDhLYXBEc0thOFRBcFVzb0xVSUdEc3JSSgpEVi9rUTNTTWZXYnlXdnZ3UDB5VitGK2l4QlRaWHZINm13Q2VjOUhMYWNhWjRVZ3dWUjhPVnp4MVJGNXQxemRVCmJabXVnL3FSeEJrT3llSjVkTFRYU09Va3NSUi92TU8vMkk5bWc5OHF4TENNTm1oclRiYVljd0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGSE4wCnFaelZnYmhLN29oTk52QmVTWE5IZkxIdU1COEdBMVVkSXdRWU1CYUFGSGgwV0phOGJNQ2dyUXJ0allMWVBxS24KY2lrUE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQW13bXp4VjZtSUpNdlArd3hQRGRTb3ZwaW1qWGMrMFlhQgpGQnY2SEVTZWlWYXphMHhubHcya0NkcTVOSlZ5M2d4aUpMTElDZkEvZVQrbG1nL1VtbHlxVHlGbXhNTnZFYURuCkVYazNna3c0cDRqRm9LTTliN0w5NlVab0JZM3JuTmVreXVjYUJGSCs1ZXNmaW9lc0V6L2t6aXRUN0ttV3VCdU8KazNROWdTZmZ5YVRXS1pBSXExMEZhcFZ0TFZKZVJlbU9RSjRKS0ZNNEUzaDNlbFBMNU9wS1pQT1ZYcVNNN3FSdgpzdUJHd2JPMHpWQkJyNFVxV0pJWk96ZTRpeW14WG1Obi9PUHBHZ0xzVEEvL0dVVXZXalBDWmt3cUNteklhQWNnClkramFLMENtdW5VeDYyR3J2b2JlRkFDK0E3d2tua2U5TkpnbnIyeXptMmVFZVE3R1NVR28KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"cert",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
ok: [managed-node1] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2QUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktZd2dnU2lBZ0VBQW9JQkFRQytqNHlUMkkxZjE0eksKTVNRblhaR1hDaU1oV3NQRS80eVI3WTNaRTZ4YTl4Tk9YOXp2T3VmdUFDNXVOZlFIZE9MRUNSV2xVU2FuVlJYZQpiVmdqSDhVWDUyTmFhWVVqKzRQckU5NXhNL0pVWHVwb3hZdll6bkd1bXZwdEp3QzMrb0U5V0wxUVE5aFBMT0J1CmN6MnU1cXdkK2o1TEdoWjhSTzRrSWNaeHlLQ2xhdnE3cFg5RzhNSFhuZTllcnU3SlMyRkVydHkvUDRQd3Bxa08Kd3ByeE1DbFN5Z3RRZ1lPeXRFa05YK1JEZEl4OVp2SmErL0EvVEpYNFg2TEVGTmxlOGZxYkFKNXowY3RweHBuaApTREJWSHc1WFBIVkVYbTNYTjFSdG1hNkQrcEhFR1E3SjRubDB0TmRJNVNTeEZIKzh3Ny9ZajJhRDN5ckVzSXcyCmFHdE50cGh6QWdNQkFBRUNnZ0VBRlo5N1Q2RndkVklqcHlQSld2QXFuNDFtYkJmOUErTU5nOWVGVTM4QWZUZngKQ0dFblZEd0dha0s2QWdaOFFNWmFIdTE2OTgzTUNZdkZsZjRwVko3N3dka2ZOSGlwdzJuOFQ1WHJGVVJVT0krbQpvS25kMHdXbDc2NmxRUEoraWVIU2ZYQU8xMGREaWNScFdROFpqam1kaVM5R2wwWUlEQ1RVTXVSa1BMQUx6b2t4Cll0KzhHSzFQYW1uSjVYNG9sQks2cnl0ZzJaaUN5VWJZekRzSlpRa29saERZNkNpcllkNU1MOFM1SkVnVkxTd28KOUFFY0dkTDlvVks5aVkvTWFKNDY1YkMxaVNSNDVncWZNeVlBL2Q3bXhEYW5FWmFoeEZadituZ1NzY1Z6dG1iNQpMdXA2TWZ6R3RCa2p0dXorWHdqcnJLMmZaOHNON1R3THVFelROMHgzMlFLQmdRRHByb28vT1YvZ3dLS3ZxS1MwCjZGNHk5Ums4YjF0YnMzc0FwRWZ3YWtSa0J4TUQyQzNnNzZ3aUF2eEdXYmRQRi9lSHd5R01lMXlEYVR5LzkvTzYKc1UzN1J0ajI4bkFwcERIR1o5Sk1ZdW5SSWJQOHU4VGc2MUo2WGtRaE5BVDdUQjg3UE9KMnRvdXdYcHNtalEySApTc1BFS0FkM01Ya2xwM2J6L3ZzQnh2TlVpd0tCZ1FEUXdyWFY5WFJmVVZ2ZVhZUENxRmEzN1cvdUxvNDhxUUJxCjhxS0svYzRnY2VUejZxR3hUaFdwajFjK2haS1NYWFJNNVdqMDFYQ01JdjdIZENVdTEvWU1ZT052RGNoYXBsS3IKcHlLY3JJVEYycmtucmJ5ODB4bmZVQnBJdWU1WGh5YWNMcXVxaDg0VEc2amJVVnVHVitWWkx4VmlLbmVXRlpFQQpMMGpOQnV1QXVRS0JnSEFac2VvNGpOMFZyRFdXTWtVeXY1VzBiSUhJYmN3azRwbmhOVDJJelB3clJKendoTG5HCllBNEUzcDZsQlVIbEhuSzdVZ25CV0JVV1UvVkhibDJlRnVtM0IvZ1hQMlZLaGpKR3RlUzlBcXUzZms5OGlDcloKMkx3cElvZkhHNml0ME1HcktIeUlmNlNCRGU3RmRicjBTOHQ2NUhIeWdzcGtkclVBR2N1MWRCK1hBb0dBWmhVQQpjOXF1Q0RveGtUL3JRVkd0N2d6R01oTkQ3SGU3dzVCWVdWMFk0WmF3SFhJN2xRY1k0cXc4UGs5MHgrdTVCaTBjCk1MVDZieHhodDhpY3NoWmFzOXpnK2hva1FlZVkvMlVnTUxyUHhpeFpaQVM2Q1IrMVJCVFlkWDJ1QmRFSjJVK2QKRkN1S1lwK2c1M1lXS2xCcXp6eU4yVTNvNGxRemJvS3d0OGxOZWNrQ2dZQWZjSnJIS2Zkd2c1eFY4NVhvWGU1TAprVFVEeUhNeXhzcTZ0dDlYUCtxRlIrS1FtNFNoVWt6WDQzb3g3bm0rRHNlK211TGFLVmVZbG9XSThKVkJ1VDRlClhvdm14NWZRWFdqd0N4czJGSDFvYTg0N0sydFl3cTVWVWp3bjRFQnJVUVFnZnRRaEtTYmxDSFJJZUthcGowT0EKSHpneTFMLzFLcnlIam1FdWNrUEdYdz09Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K",
"encoding": "base64",
"item": [
"key",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/private/quadlet_demo.key"
}
ok: [managed-node1] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQVBNSXZsQnhhRUpqdjRRUTEzdGcwd2N3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJZegpNRGhpWlRVd0xUY3hOamcwTWpZekxXSm1PRFF4TUdRM0xUZGlOakJrTXpBMU1CNFhEVEkxTURJd09ERTJNemd6Ck1Wb1hEVEkyTURJd09ERTJNelF5TWxvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUF2bytNazlpTlg5ZU15akVrSjEyUmx3b2pJVnJEeFArTQprZTJOMlJPc1d2Y1RUbC9jN3pybjdnQXVialgwQjNUaXhBa1ZwVkVtcDFVVjNtMVlJeC9GRitkaldtbUZJL3VECjZ4UGVjVFB5VkY3cWFNV0wyTTV4cnByNmJTY0F0L3FCUFZpOVVFUFlUeXpnYm5NOXJ1YXNIZm8rU3hvV2ZFVHUKSkNIR2NjaWdwV3I2dTZWL1J2REIxNTN2WHE3dXlVdGhSSzdjdnorRDhLYXBEc0thOFRBcFVzb0xVSUdEc3JSSgpEVi9rUTNTTWZXYnlXdnZ3UDB5VitGK2l4QlRaWHZINm13Q2VjOUhMYWNhWjRVZ3dWUjhPVnp4MVJGNXQxemRVCmJabXVnL3FSeEJrT3llSjVkTFRYU09Va3NSUi92TU8vMkk5bWc5OHF4TENNTm1oclRiYVljd0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGSE4wCnFaelZnYmhLN29oTk52QmVTWE5IZkxIdU1COEdBMVVkSXdRWU1CYUFGSGgwV0phOGJNQ2dyUXJ0allMWVBxS24KY2lrUE1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQW13bXp4VjZtSUpNdlArd3hQRGRTb3ZwaW1qWGMrMFlhQgpGQnY2SEVTZWlWYXphMHhubHcya0NkcTVOSlZ5M2d4aUpMTElDZkEvZVQrbG1nL1VtbHlxVHlGbXhNTnZFYURuCkVYazNna3c0cDRqRm9LTTliN0w5NlVab0JZM3JuTmVreXVjYUJGSCs1ZXNmaW9lc0V6L2t6aXRUN0ttV3VCdU8KazNROWdTZmZ5YVRXS1pBSXExMEZhcFZ0TFZKZVJlbU9RSjRKS0ZNNEUzaDNlbFBMNU9wS1pQT1ZYcVNNN3FSdgpzdUJHd2JPMHpWQkJyNFVxV0pJWk96ZTRpeW14WG1Obi9PUHBHZ0xzVEEvL0dVVXZXalBDWmt3cUNteklhQWNnClkramFLMENtdW5VeDYyR3J2b2JlRkFDK0E3d2tua2U5TkpnbnIyeXptMmVFZVE3R1NVR28KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"ca",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
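For context: slurp always returns file contents base64-encoded, which is why the cert, key, and ca payloads above are opaque blobs; consumers decode them with the b64decode filter. A minimal sketch for a single file (the register name is an assumption; the source path matches the log):

    - name: Slurp the contents of the files
      ansible.builtin.slurp:
        src: /etc/pki/tls/certs/quadlet_demo.crt
      register: __cert_slurp

    - name: Show the decoded PEM
      ansible.builtin.debug:
        msg: "{{ __cert_slurp.content | b64decode }}"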
TASK [fedora.linux_system_roles.certificate : Create return data] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160
Saturday 08 February 2025 11:38:32 -0500 (0:00:01.176) 0:00:07.968 *****
ok: [managed-node1] => {
"ansible_facts": {
"certificate_test_certs": {
"quadlet_demo": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAPMIvlBxaEJjv4QQ13tg0wcwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Yz\nMDhiZTUwLTcxNjg0MjYzLWJmODQxMGQ3LTdiNjBkMzA1MB4XDTI1MDIwODE2Mzgz\nMVoXDTI2MDIwODE2MzQyMlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvo+Mk9iNX9eMyjEkJ12RlwojIVrDxP+M\nke2N2ROsWvcTTl/c7zrn7gAubjX0B3TixAkVpVEmp1UV3m1YIx/FF+djWmmFI/uD\n6xPecTPyVF7qaMWL2M5xrpr6bScAt/qBPVi9UEPYTyzgbnM9ruasHfo+SxoWfETu\nJCHGccigpWr6u6V/RvDB153vXq7uyUthRK7cvz+D8KapDsKa8TApUsoLUIGDsrRJ\nDV/kQ3SMfWbyWvvwP0yV+F+ixBTZXvH6mwCec9HLacaZ4UgwVR8OVzx1RF5t1zdU\nbZmug/qRxBkOyeJ5dLTXSOUksRR/vMO/2I9mg98qxLCMNmhrTbaYcwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHN0\nqZzVgbhK7ohNNvBeSXNHfLHuMB8GA1UdIwQYMBaAFHh0WJa8bMCgrQrtjYLYPqKn\ncikPMA0GCSqGSIb3DQEBCwUAA4IBAQAmwmzxV6mIJMvP+wxPDdSovpimjXc+0YaB\nFBv6HESeiVaza0xnlw2kCdq5NJVy3gxiJLLICfA/eT+lmg/UmlyqTyFmxMNvEaDn\nEXk3gkw4p4jFoKM9b7L96UZoBY3rnNekyucaBFH+5esfioesEz/kzitT7KmWuBuO\nk3Q9gSffyaTWKZAIq10FapVtLVJeRemOQJ4JKFM4E3h3elPL5OpKZPOVXqSM7qRv\nsuBGwbO0zVBBr4UqWJIZOze4iymxXmNn/OPpGgLsTA//GUUvWjPCZkwqCmzIaAcg\nY+jaK0CmunUx62GrvobeFAC+A7wknke9NJgnr2yzm2eEeQ7GSUGo\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAPMIvlBxaEJjv4QQ13tg0wcwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Yz\nMDhiZTUwLTcxNjg0MjYzLWJmODQxMGQ3LTdiNjBkMzA1MB4XDTI1MDIwODE2Mzgz\nMVoXDTI2MDIwODE2MzQyMlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvo+Mk9iNX9eMyjEkJ12RlwojIVrDxP+M\nke2N2ROsWvcTTl/c7zrn7gAubjX0B3TixAkVpVEmp1UV3m1YIx/FF+djWmmFI/uD\n6xPecTPyVF7qaMWL2M5xrpr6bScAt/qBPVi9UEPYTyzgbnM9ruasHfo+SxoWfETu\nJCHGccigpWr6u6V/RvDB153vXq7uyUthRK7cvz+D8KapDsKa8TApUsoLUIGDsrRJ\nDV/kQ3SMfWbyWvvwP0yV+F+ixBTZXvH6mwCec9HLacaZ4UgwVR8OVzx1RF5t1zdU\nbZmug/qRxBkOyeJ5dLTXSOUksRR/vMO/2I9mg98qxLCMNmhrTbaYcwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHN0\nqZzVgbhK7ohNNvBeSXNHfLHuMB8GA1UdIwQYMBaAFHh0WJa8bMCgrQrtjYLYPqKn\ncikPMA0GCSqGSIb3DQEBCwUAA4IBAQAmwmzxV6mIJMvP+wxPDdSovpimjXc+0YaB\nFBv6HESeiVaza0xnlw2kCdq5NJVy3gxiJLLICfA/eT+lmg/UmlyqTyFmxMNvEaDn\nEXk3gkw4p4jFoKM9b7L96UZoBY3rnNekyucaBFH+5esfioesEz/kzitT7KmWuBuO\nk3Q9gSffyaTWKZAIq10FapVtLVJeRemOQJ4JKFM4E3h3elPL5OpKZPOVXqSM7qRv\nsuBGwbO0zVBBr4UqWJIZOze4iymxXmNn/OPpGgLsTA//GUUvWjPCZkwqCmzIaAcg\nY+jaK0CmunUx62GrvobeFAC+A7wknke9NJgnr2yzm2eEeQ7GSUGo\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC+j4yT2I1f14zK\nMSQnXZGXCiMhWsPE/4yR7Y3ZE6xa9xNOX9zvOufuAC5uNfQHdOLECRWlUSanVRXe\nbVgjH8UX52NaaYUj+4PrE95xM/JUXupoxYvYznGumvptJwC3+oE9WL1QQ9hPLOBu\ncz2u5qwd+j5LGhZ8RO4kIcZxyKClavq7pX9G8MHXne9eru7JS2FErty/P4PwpqkO\nwprxMClSygtQgYOytEkNX+RDdIx9ZvJa+/A/TJX4X6LEFNle8fqbAJ5z0ctpxpnh\nSDBVHw5XPHVEXm3XN1Rtma6D+pHEGQ7J4nl0tNdI5SSxFH+8w7/Yj2aD3yrEsIw2\naGtNtphzAgMBAAECggEAFZ97T6FwdVIjpyPJWvAqn41mbBf9A+MNg9eFU38AfTfx\nCGEnVDwGakK6AgZ8QMZaHu16983MCYvFlf4pVJ77wdkfNHipw2n8T5XrFURUOI+m\noKnd0wWl766lQPJ+ieHSfXAO10dDicRpWQ8ZjjmdiS9Gl0YIDCTUMuRkPLALzokx\nYt+8GK1PamnJ5X4olBK6rytg2ZiCyUbYzDsJZQkolhDY6CirYd5ML8S5JEgVLSwo\n9AEcGdL9oVK9iY/MaJ465bC1iSR45gqfMyYA/d7mxDanEZahxFZv+ngSscVztmb5\nLup6MfzGtBkjtuz+XwjrrK2fZ8sN7TwLuEzTN0x32QKBgQDproo/OV/gwKKvqKS0\n6F4y9Rk8b1tbs3sApEfwakRkBxMD2C3g76wiAvxGWbdPF/eHwyGMe1yDaTy/9/O6\nsU37Rtj28nAppDHGZ9JMYunRIbP8u8Tg61J6XkQhNAT7TB87POJ2touwXpsmjQ2H\nSsPEKAd3MXklp3bz/vsBxvNUiwKBgQDQwrXV9XRfUVveXYPCqFa37W/uLo48qQBq\n8qKK/c4gceTz6qGxThWpj1c+hZKSXXRM5Wj01XCMIv7HdCUu1/YMYONvDchaplKr\npyKcrITF2rknrby80xnfUBpIue5XhyacLquqh84TG6jbUVuGV+VZLxViKneWFZEA\nL0jNBuuAuQKBgHAZseo4jN0VrDWWMkUyv5W0bIHIbcwk4pnhNT2IzPwrRJzwhLnG\nYA4E3p6lBUHlHnK7UgnBWBUWU/VHbl2eFum3B/gXP2VKhjJGteS9Aqu3fk98iCrZ\n2LwpIofHG6it0MGrKHyIf6SBDe7Fdbr0S8t65HHygspkdrUAGcu1dB+XAoGAZhUA\nc9quCDoxkT/rQVGt7gzGMhND7He7w5BYWV0Y4ZawHXI7lQcY4qw8Pk90x+u5Bi0c\nMLT6bxxht8icshZas9zg+hokQeeY/2UgMLrPxixZZAS6CR+1RBTYdX2uBdEJ2U+d\nFCuKYp+g53YWKlBqzzyN2U3o4lQzboKwt8lNeckCgYAfcJrHKfdwg5xV85XoXe5L\nkTUDyHMyxsq6tt9XP+qFR+KQm4ShUkzX43ox7nm+Dse+muLaKVeYloWI8JVBuT4e\nXovmx5fQXWjwCxs2FH1oa847K2tYwq5VUjwn4EBrUQQgftQhKSblCHRIeKapj0OA\nHzgy1L/1KryHjmEuckPGXw==\n-----END PRIVATE KEY-----\n"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176
Saturday 08 February 2025 11:38:33 -0500 (0:00:00.048) 0:00:08.016 *****
ok: [managed-node1] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAPMIvlBxaEJjv4QQ13tg0wcwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Yz\nMDhiZTUwLTcxNjg0MjYzLWJmODQxMGQ3LTdiNjBkMzA1MB4XDTI1MDIwODE2Mzgz\nMVoXDTI2MDIwODE2MzQyMlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvo+Mk9iNX9eMyjEkJ12RlwojIVrDxP+M\nke2N2ROsWvcTTl/c7zrn7gAubjX0B3TixAkVpVEmp1UV3m1YIx/FF+djWmmFI/uD\n6xPecTPyVF7qaMWL2M5xrpr6bScAt/qBPVi9UEPYTyzgbnM9ruasHfo+SxoWfETu\nJCHGccigpWr6u6V/RvDB153vXq7uyUthRK7cvz+D8KapDsKa8TApUsoLUIGDsrRJ\nDV/kQ3SMfWbyWvvwP0yV+F+ixBTZXvH6mwCec9HLacaZ4UgwVR8OVzx1RF5t1zdU\nbZmug/qRxBkOyeJ5dLTXSOUksRR/vMO/2I9mg98qxLCMNmhrTbaYcwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHN0\nqZzVgbhK7ohNNvBeSXNHfLHuMB8GA1UdIwQYMBaAFHh0WJa8bMCgrQrtjYLYPqKn\ncikPMA0GCSqGSIb3DQEBCwUAA4IBAQAmwmzxV6mIJMvP+wxPDdSovpimjXc+0YaB\nFBv6HESeiVaza0xnlw2kCdq5NJVy3gxiJLLICfA/eT+lmg/UmlyqTyFmxMNvEaDn\nEXk3gkw4p4jFoKM9b7L96UZoBY3rnNekyucaBFH+5esfioesEz/kzitT7KmWuBuO\nk3Q9gSffyaTWKZAIq10FapVtLVJeRemOQJ4JKFM4E3h3elPL5OpKZPOVXqSM7qRv\nsuBGwbO0zVBBr4UqWJIZOze4iymxXmNn/OPpGgLsTA//GUUvWjPCZkwqCmzIaAcg\nY+jaK0CmunUx62GrvobeFAC+A7wknke9NJgnr2yzm2eEeQ7GSUGo\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC+j4yT2I1f14zK\nMSQnXZGXCiMhWsPE/4yR7Y3ZE6xa9xNOX9zvOufuAC5uNfQHdOLECRWlUSanVRXe\nbVgjH8UX52NaaYUj+4PrE95xM/JUXupoxYvYznGumvptJwC3+oE9WL1QQ9hPLOBu\ncz2u5qwd+j5LGhZ8RO4kIcZxyKClavq7pX9G8MHXne9eru7JS2FErty/P4PwpqkO\nwprxMClSygtQgYOytEkNX+RDdIx9ZvJa+/A/TJX4X6LEFNle8fqbAJ5z0ctpxpnh\nSDBVHw5XPHVEXm3XN1Rtma6D+pHEGQ7J4nl0tNdI5SSxFH+8w7/Yj2aD3yrEsIw2\naGtNtphzAgMBAAECggEAFZ97T6FwdVIjpyPJWvAqn41mbBf9A+MNg9eFU38AfTfx\nCGEnVDwGakK6AgZ8QMZaHu16983MCYvFlf4pVJ77wdkfNHipw2n8T5XrFURUOI+m\noKnd0wWl766lQPJ+ieHSfXAO10dDicRpWQ8ZjjmdiS9Gl0YIDCTUMuRkPLALzokx\nYt+8GK1PamnJ5X4olBK6rytg2ZiCyUbYzDsJZQkolhDY6CirYd5ML8S5JEgVLSwo\n9AEcGdL9oVK9iY/MaJ465bC1iSR45gqfMyYA/d7mxDanEZahxFZv+ngSscVztmb5\nLup6MfzGtBkjtuz+XwjrrK2fZ8sN7TwLuEzTN0x32QKBgQDproo/OV/gwKKvqKS0\n6F4y9Rk8b1tbs3sApEfwakRkBxMD2C3g76wiAvxGWbdPF/eHwyGMe1yDaTy/9/O6\nsU37Rtj28nAppDHGZ9JMYunRIbP8u8Tg61J6XkQhNAT7TB87POJ2touwXpsmjQ2H\nSsPEKAd3MXklp3bz/vsBxvNUiwKBgQDQwrXV9XRfUVveXYPCqFa37W/uLo48qQBq\n8qKK/c4gceTz6qGxThWpj1c+hZKSXXRM5Wj01XCMIv7HdCUu1/YMYONvDchaplKr\npyKcrITF2rknrby80xnfUBpIue5XhyacLquqh84TG6jbUVuGV+VZLxViKneWFZEA\nL0jNBuuAuQKBgHAZseo4jN0VrDWWMkUyv5W0bIHIbcwk4pnhNT2IzPwrRJzwhLnG\nYA4E3p6lBUHlHnK7UgnBWBUWU/VHbl2eFum3B/gXP2VKhjJGteS9Aqu3fk98iCrZ\n2LwpIofHG6it0MGrKHyIf6SBDe7Fdbr0S8t65HHygspkdrUAGcu1dB+XAoGAZhUA\nc9quCDoxkT/rQVGt7gzGMhND7He7w5BYWV0Y4ZawHXI7lQcY4qw8Pk90x+u5Bi0c\nMLT6bxxht8icshZas9zg+hokQeeY/2UgMLrPxixZZAS6CR+1RBTYdX2uBdEJ2U+d\nFCuKYp+g53YWKlBqzzyN2U3o4lQzboKwt8lNeckCgYAfcJrHKfdwg5xV85XoXe5L\nkTUDyHMyxsq6tt9XP+qFR+KQm4ShUkzX43ox7nm+Dse+muLaKVeYloWI8JVBuT4e\nXovmx5fQXWjwCxs2FH1oa847K2tYwq5VUjwn4EBrUQQgftQhKSblCHRIeKapj0OA\nHzgy1L/1KryHjmEuckPGXw==\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAPMIvlBxaEJjv4QQ13tg0wcwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Yz\nMDhiZTUwLTcxNjg0MjYzLWJmODQxMGQ3LTdiNjBkMzA1MB4XDTI1MDIwODE2Mzgz\nMVoXDTI2MDIwODE2MzQyMlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvo+Mk9iNX9eMyjEkJ12RlwojIVrDxP+M\nke2N2ROsWvcTTl/c7zrn7gAubjX0B3TixAkVpVEmp1UV3m1YIx/FF+djWmmFI/uD\n6xPecTPyVF7qaMWL2M5xrpr6bScAt/qBPVi9UEPYTyzgbnM9ruasHfo+SxoWfETu\nJCHGccigpWr6u6V/RvDB153vXq7uyUthRK7cvz+D8KapDsKa8TApUsoLUIGDsrRJ\nDV/kQ3SMfWbyWvvwP0yV+F+ixBTZXvH6mwCec9HLacaZ4UgwVR8OVzx1RF5t1zdU\nbZmug/qRxBkOyeJ5dLTXSOUksRR/vMO/2I9mg98qxLCMNmhrTbaYcwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHN0\nqZzVgbhK7ohNNvBeSXNHfLHuMB8GA1UdIwQYMBaAFHh0WJa8bMCgrQrtjYLYPqKn\ncikPMA0GCSqGSIb3DQEBCwUAA4IBAQAmwmzxV6mIJMvP+wxPDdSovpimjXc+0YaB\nFBv6HESeiVaza0xnlw2kCdq5NJVy3gxiJLLICfA/eT+lmg/UmlyqTyFmxMNvEaDn\nEXk3gkw4p4jFoKM9b7L96UZoBY3rnNekyucaBFH+5esfioesEz/kzitT7KmWuBuO\nk3Q9gSffyaTWKZAIq10FapVtLVJeRemOQJ4JKFM4E3h3elPL5OpKZPOVXqSM7qRv\nsuBGwbO0zVBBr4UqWJIZOze4iymxXmNn/OPpGgLsTA//GUUvWjPCZkwqCmzIaAcg\nY+jaK0CmunUx62GrvobeFAC+A7wknke9NJgnr2yzm2eEeQ7GSUGo\n-----END CERTIFICATE-----\n'}) => {
"ansible_loop_var": "item",
"changed": false,
"cmd": [
"getcert",
"stop-tracking",
"-f",
"/etc/pki/tls/certs/quadlet_demo.crt"
],
"delta": "0:00:00.026081",
"end": "2025-02-08 11:38:33.441795",
"item": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAPMIvlBxaEJjv4QQ13tg0wcwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Yz\nMDhiZTUwLTcxNjg0MjYzLWJmODQxMGQ3LTdiNjBkMzA1MB4XDTI1MDIwODE2Mzgz\nMVoXDTI2MDIwODE2MzQyMlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvo+Mk9iNX9eMyjEkJ12RlwojIVrDxP+M\nke2N2ROsWvcTTl/c7zrn7gAubjX0B3TixAkVpVEmp1UV3m1YIx/FF+djWmmFI/uD\n6xPecTPyVF7qaMWL2M5xrpr6bScAt/qBPVi9UEPYTyzgbnM9ruasHfo+SxoWfETu\nJCHGccigpWr6u6V/RvDB153vXq7uyUthRK7cvz+D8KapDsKa8TApUsoLUIGDsrRJ\nDV/kQ3SMfWbyWvvwP0yV+F+ixBTZXvH6mwCec9HLacaZ4UgwVR8OVzx1RF5t1zdU\nbZmug/qRxBkOyeJ5dLTXSOUksRR/vMO/2I9mg98qxLCMNmhrTbaYcwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHN0\nqZzVgbhK7ohNNvBeSXNHfLHuMB8GA1UdIwQYMBaAFHh0WJa8bMCgrQrtjYLYPqKn\ncikPMA0GCSqGSIb3DQEBCwUAA4IBAQAmwmzxV6mIJMvP+wxPDdSovpimjXc+0YaB\nFBv6HESeiVaza0xnlw2kCdq5NJVy3gxiJLLICfA/eT+lmg/UmlyqTyFmxMNvEaDn\nEXk3gkw4p4jFoKM9b7L96UZoBY3rnNekyucaBFH+5esfioesEz/kzitT7KmWuBuO\nk3Q9gSffyaTWKZAIq10FapVtLVJeRemOQJ4JKFM4E3h3elPL5OpKZPOVXqSM7qRv\nsuBGwbO0zVBBr4UqWJIZOze4iymxXmNn/OPpGgLsTA//GUUvWjPCZkwqCmzIaAcg\nY+jaK0CmunUx62GrvobeFAC+A7wknke9NJgnr2yzm2eEeQ7GSUGo\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAPMIvlBxaEJjv4QQ13tg0wcwDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Yz\nMDhiZTUwLTcxNjg0MjYzLWJmODQxMGQ3LTdiNjBkMzA1MB4XDTI1MDIwODE2Mzgz\nMVoXDTI2MDIwODE2MzQyMlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvo+Mk9iNX9eMyjEkJ12RlwojIVrDxP+M\nke2N2ROsWvcTTl/c7zrn7gAubjX0B3TixAkVpVEmp1UV3m1YIx/FF+djWmmFI/uD\n6xPecTPyVF7qaMWL2M5xrpr6bScAt/qBPVi9UEPYTyzgbnM9ruasHfo+SxoWfETu\nJCHGccigpWr6u6V/RvDB153vXq7uyUthRK7cvz+D8KapDsKa8TApUsoLUIGDsrRJ\nDV/kQ3SMfWbyWvvwP0yV+F+ixBTZXvH6mwCec9HLacaZ4UgwVR8OVzx1RF5t1zdU\nbZmug/qRxBkOyeJ5dLTXSOUksRR/vMO/2I9mg98qxLCMNmhrTbaYcwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHN0\nqZzVgbhK7ohNNvBeSXNHfLHuMB8GA1UdIwQYMBaAFHh0WJa8bMCgrQrtjYLYPqKn\ncikPMA0GCSqGSIb3DQEBCwUAA4IBAQAmwmzxV6mIJMvP+wxPDdSovpimjXc+0YaB\nFBv6HESeiVaza0xnlw2kCdq5NJVy3gxiJLLICfA/eT+lmg/UmlyqTyFmxMNvEaDn\nEXk3gkw4p4jFoKM9b7L96UZoBY3rnNekyucaBFH+5esfioesEz/kzitT7KmWuBuO\nk3Q9gSffyaTWKZAIq10FapVtLVJeRemOQJ4JKFM4E3h3elPL5OpKZPOVXqSM7qRv\nsuBGwbO0zVBBr4UqWJIZOze4iymxXmNn/OPpGgLsTA//GUUvWjPCZkwqCmzIaAcg\nY+jaK0CmunUx62GrvobeFAC+A7wknke9NJgnr2yzm2eEeQ7GSUGo\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC+j4yT2I1f14zK\nMSQnXZGXCiMhWsPE/4yR7Y3ZE6xa9xNOX9zvOufuAC5uNfQHdOLECRWlUSanVRXe\nbVgjH8UX52NaaYUj+4PrE95xM/JUXupoxYvYznGumvptJwC3+oE9WL1QQ9hPLOBu\ncz2u5qwd+j5LGhZ8RO4kIcZxyKClavq7pX9G8MHXne9eru7JS2FErty/P4PwpqkO\nwprxMClSygtQgYOytEkNX+RDdIx9ZvJa+/A/TJX4X6LEFNle8fqbAJ5z0ctpxpnh\nSDBVHw5XPHVEXm3XN1Rtma6D+pHEGQ7J4nl0tNdI5SSxFH+8w7/Yj2aD3yrEsIw2\naGtNtphzAgMBAAECggEAFZ97T6FwdVIjpyPJWvAqn41mbBf9A+MNg9eFU38AfTfx\nCGEnVDwGakK6AgZ8QMZaHu16983MCYvFlf4pVJ77wdkfNHipw2n8T5XrFURUOI+m\noKnd0wWl766lQPJ+ieHSfXAO10dDicRpWQ8ZjjmdiS9Gl0YIDCTUMuRkPLALzokx\nYt+8GK1PamnJ5X4olBK6rytg2ZiCyUbYzDsJZQkolhDY6CirYd5ML8S5JEgVLSwo\n9AEcGdL9oVK9iY/MaJ465bC1iSR45gqfMyYA/d7mxDanEZahxFZv+ngSscVztmb5\nLup6MfzGtBkjtuz+XwjrrK2fZ8sN7TwLuEzTN0x32QKBgQDproo/OV/gwKKvqKS0\n6F4y9Rk8b1tbs3sApEfwakRkBxMD2C3g76wiAvxGWbdPF/eHwyGMe1yDaTy/9/O6\nsU37Rtj28nAppDHGZ9JMYunRIbP8u8Tg61J6XkQhNAT7TB87POJ2touwXpsmjQ2H\nSsPEKAd3MXklp3bz/vsBxvNUiwKBgQDQwrXV9XRfUVveXYPCqFa37W/uLo48qQBq\n8qKK/c4gceTz6qGxThWpj1c+hZKSXXRM5Wj01XCMIv7HdCUu1/YMYONvDchaplKr\npyKcrITF2rknrby80xnfUBpIue5XhyacLquqh84TG6jbUVuGV+VZLxViKneWFZEA\nL0jNBuuAuQKBgHAZseo4jN0VrDWWMkUyv5W0bIHIbcwk4pnhNT2IzPwrRJzwhLnG\nYA4E3p6lBUHlHnK7UgnBWBUWU/VHbl2eFum3B/gXP2VKhjJGteS9Aqu3fk98iCrZ\n2LwpIofHG6it0MGrKHyIf6SBDe7Fdbr0S8t65HHygspkdrUAGcu1dB+XAoGAZhUA\nc9quCDoxkT/rQVGt7gzGMhND7He7w5BYWV0Y4ZawHXI7lQcY4qw8Pk90x+u5Bi0c\nMLT6bxxht8icshZas9zg+hokQeeY/2UgMLrPxixZZAS6CR+1RBTYdX2uBdEJ2U+d\nFCuKYp+g53YWKlBqzzyN2U3o4lQzboKwt8lNeckCgYAfcJrHKfdwg5xV85XoXe5L\nkTUDyHMyxsq6tt9XP+qFR+KQm4ShUkzX43ox7nm+Dse+muLaKVeYloWI8JVBuT4e\nXovmx5fQXWjwCxs2FH1oa847K2tYwq5VUjwn4EBrUQQgftQhKSblCHRIeKapj0OA\nHzgy1L/1KryHjmEuckPGXw==\n-----END PRIVATE KEY-----\n"
},
"rc": 0,
"start": "2025-02-08 11:38:33.415714"
}
STDOUT:
Request "20250208163831" removed.
TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
Saturday 08 February 2025 11:38:33 -0500 (0:00:00.503) 0:00:08.519 *****
changed: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
changed: [managed-node1] => (item=/etc/pki/tls/private/quadlet_demo.key) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/private/quadlet_demo.key",
"path": "/etc/pki/tls/private/quadlet_demo.key",
"state": "absent"
}
ok: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": false,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
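For context: the cleanup loop feeds the file module the cert, key, and ca paths from the return data above; because ca and cert point at the same file for a self-signed request, the path appears twice, and the third pass finds it already gone and reports ok rather than changed. A minimal equivalent with the paths taken from the logged items:

    - name: Remove files
      ansible.builtin.file:
        path: "{{ item }}"
        state: absent
      loop:
        - /etc/pki/tls/certs/quadlet_demo.crt
        - /etc/pki/tls/private/quadlet_demo.key
        - /etc/pki/tls/certs/quadlet_demo.crt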
TASK [Run the role] ************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62
Saturday 08 February 2025 11:38:34 -0500 (0:00:01.117) 0:00:09.637 *****
included: fedora.linux_system_roles.podman for managed-node1
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 08 February 2025 11:38:34 -0500 (0:00:00.124) 0:00:09.762 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 08 February 2025 11:38:34 -0500 (0:00:00.047) 0:00:09.809 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 08 February 2025 11:38:34 -0500 (0:00:00.060) 0:00:09.869 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 08 February 2025 11:38:35 -0500 (0:00:00.435) 0:00:10.305 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 08 February 2025 11:38:35 -0500 (0:00:00.052) 0:00:10.358 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 08 February 2025 11:38:35 -0500 (0:00:00.394) 0:00:10.753 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 08 February 2025 11:38:35 -0500 (0:00:00.035) 0:00:10.788 *****
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
"ansible_facts": {
"__podman_packages": [
"iptables-nft",
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
"ansible_facts": {
"__podman_packages": [
"iptables-nft",
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_10.yml"
}
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 08 February 2025 11:38:35 -0500 (0:00:00.068) 0:00:10.857 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
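For context: the "censored" line is how Ansible renders any result from a task that sets no_log: true; here it hides the package inventory that the install condition below consults through ansible_facts.packages. A sketch of the task:

    - name: Gather the package facts
      ansible.builtin.package_facts:
      no_log: true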
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 08 February 2025 11:38:36 -0500 (0:00:01.076) 0:00:11.933 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 08 February 2025 11:38:36 -0500 (0:00:00.048) 0:00:11.981 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages))",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 08 February 2025 11:38:37 -0500 (0:00:00.067) 0:00:12.049 *****
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 08 February 2025 11:38:37 -0500 (0:00:00.082) 0:00:12.131 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 08 February 2025 11:38:37 -0500 (0:00:00.126) 0:00:12.258 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 08 February 2025 11:38:37 -0500 (0:00:00.105) 0:00:12.364 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.028420",
"end": "2025-02-08 11:38:37.713304",
"rc": 0,
"start": "2025-02-08 11:38:37.684884"
}
STDOUT:
podman version 5.3.1
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 08 February 2025 11:38:37 -0500 (0:00:00.486) 0:00:12.850 *****
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "5.3.1"
},
"changed": false
}
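For context: the podman_version fact is derived from the command output two tasks above ("podman version 5.3.1"). A sketch of the command/set_fact pair (the register name and the split expression are assumptions):

    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_output
      changed_when: false

    - name: Set podman version
      ansible.builtin.set_fact:
        podman_version: "{{ (__podman_version_output.stdout.split())[2] }}"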
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 08 February 2025 11:38:37 -0500 (0:00:00.095) 0:00:12.945 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
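For context: the version gates use Jinja's version() test against the freshly set podman_version fact; with 5.3.1 installed every "older than" condition is false and the guards skip. A sketch of one guard, with the condition matching the logged false_condition and the message an assumption:

    - name: Podman package version must be 4.2 or later
      ansible.builtin.fail:
        msg: podman {{ podman_version }} is too old, version 4.2 or later is required
      when: podman_version is version("4.2", "<")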
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 08 February 2025 11:38:38 -0500 (0:00:00.088) 0:00:13.034 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 08 February 2025 11:38:38 -0500 (0:00:00.245) 0:00:13.279 *****
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 08 February 2025 11:38:38 -0500 (0:00:00.211) 0:00:13.491 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 08 February 2025 11:38:38 -0500 (0:00:00.069) 0:00:13.561 *****
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 08 February 2025 11:38:38 -0500 (0:00:00.085) 0:00:13.646 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:38:38 -0500 (0:00:00.111) 0:00:13.757 *****
ok: [managed-node1] => {
"ansible_facts": {
"getent_passwd": {
"root": [
"x",
"0",
"0",
"Super User",
"/root",
"/bin/bash"
]
}
},
"changed": false
}
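For context: the getent_passwd fact above comes from the getent module and feeds the "Fail if user does not exist" check right after it. A minimal sketch (__podman_user is the role variable seen in the later conditions; fail_key: false keeps the lookup itself from failing so the follow-up task can raise a clearer error):

    - name: Get user information
      ansible.builtin.getent:
        database: passwd
        key: "{{ __podman_user }}"
        fail_key: false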
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:38:39 -0500 (0:00:00.693) 0:00:14.451 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:38:39 -0500 (0:00:00.057) 0:00:14.508 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:38:39 -0500 (0:00:00.064) 0:00:14.573 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.455) 0:00:15.029 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.132) 0:00:15.162 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.099) 0:00:15.261 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.093) 0:00:15.355 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.092) 0:00:15.447 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.090) 0:00:15.537 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.066) 0:00:15.604 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.049) 0:00:15.653 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
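Taken together, the skipped tasks above implement a two-branch strategy: query getsubids(1) for non-root users when the binary exists, otherwise fall back to parsing /etc/subuid and /etc/subgid. A rough sketch of the two entry points (register names are hypothetical):

    - name: Check with getsubids for user subuids
      ansible.builtin.command: getsubids {{ __podman_user }}
      register: __subuids_out  # hypothetical register name
      changed_when: false
      when:
        - __podman_stat_getsubids.stat.exists
        - __podman_user not in ["root", "0"]

    - name: Get subuid file
      ansible.builtin.slurp:
        path: /etc/subuid
      register: __subuid_file  # hypothetical register name
      when: not __podman_stat_getsubids.stat.exists

In this run both branches skip: the user is root, and getsubids exists, so the file-based fallback is never taken.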
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.059) 0:00:15.712 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.102) 0:00:15.815 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 08 February 2025 11:38:40 -0500 (0:00:00.099) 0:00:15.915 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.106) 0:00:16.021 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
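podman_containers_conf is empty in this run, so both tasks skip. If it were set, the role would render it into the 50-systemroles.conf drop-in whose path was computed above. A hypothetical value (the key is an illustrative containers.conf TOML setting, not taken from this run):

    podman_containers_conf:
      containers:
        log_size_max: 10485760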
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.065) 0:00:16.086 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.142) 0:00:16.229 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.092) 0:00:16.321 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
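Likewise for registries: a hypothetical podman_registries_conf that would land in /etc/containers/registries.conf.d/50-systemroles.conf:

    podman_registries_conf:
      unqualified-search-registries:
        - registry.fedoraproject.org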
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.095) 0:00:16.417 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.185) 0:00:16.603 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.099) 0:00:16.703 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
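And for storage, a hypothetical podman_storage_conf rendered into /etc/containers/storage.conf (section and keys are standard storage.conf TOML, not values from this run):

    podman_storage_conf:
      storage:
        driver: overlay
        runroot: /run/containers/storage
        graphroot: /var/lib/containers/storage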
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.087) 0:00:16.791 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 08 February 2025 11:38:41 -0500 (0:00:00.156) 0:00:16.947 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.089) 0:00:17.037 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.086) 0:00:17.123 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.062) 0:00:17.186 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
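podman_policy_json is also empty here, so the whole policy.json branch skips. A hypothetical value, mirroring the stock permissive policy documented in containers-policy.json(5) (the variable shape is an assumption):

    podman_policy_json:
      default:
        - type: insecureAcceptAnything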
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.067) 0:00:17.254 *****
included: fedora.linux_system_roles.firewall for managed-node1
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.150) 0:00:17.404 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.056) 0:00:17.460 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.083) 0:00:17.544 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.403) 0:00:17.948 *****
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_ostree": false
},
"changed": false
}
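The ostree probe most likely stats the conventional /run/ostree-booted marker; the path is an assumption, since the log only shows exists: false. A sketch:

    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted
      register: __ostree_stat  # hypothetical register name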
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Saturday 08 February 2025 11:38:42 -0500 (0:00:00.037) 0:00:17.985 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Saturday 08 February 2025 11:38:43 -0500 (0:00:00.440) 0:00:18.425 *****
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Saturday 08 February 2025 11:38:43 -0500 (0:00:00.070) 0:00:18.496 *****
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: firewalld
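"Nothing to do" with rc 0 means the package was already installed. The task is roughly the standard package-module idiom:

    - name: Install firewalld
      ansible.builtin.package:
        name: firewalld
        state: present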
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Saturday 08 February 2025 11:38:44 -0500 (0:00:00.882) 0:00:19.378 *****
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Saturday 08 February 2025 11:38:44 -0500 (0:00:00.098) 0:00:19.477 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Saturday 08 February 2025 11:38:44 -0500 (0:00:00.104) 0:00:19.582 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Saturday 08 February 2025 11:38:44 -0500 (0:00:00.071) 0:00:19.653 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Saturday 08 February 2025 11:38:44 -0500 (0:00:00.047) 0:00:19.700 *****
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
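All three items skip because firewall_disable_conflicting_services is false by default. A sketch of the loop as the skip output implies it (the module choice is an assumption):

    - name: Attempt to stop and disable conflicting services
      ansible.builtin.systemd:
        name: "{{ item }}"
        state: stopped
        enabled: false
      loop:
        - nftables
        - iptables
        - ufw
      when: firewall_disable_conflicting_services | bool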
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Saturday 08 February 2025 11:38:44 -0500 (0:00:00.064) 0:00:19.765 *****
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "sysinit.target dbus.socket dbus-broker.service system.slice polkit.service basic.target",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target network-pre.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "yes",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "iptables.service ipset.service ip6tables.service ebtables.service shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3200843776",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectControlGroupsEx": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
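UnitFileState is still "disabled" at this point; the unmask step only clears any masking without enabling or starting the unit, roughly:

    - name: Unmask firewalld service
      ansible.builtin.systemd:
        name: firewalld
        masked: false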
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Saturday 08 February 2025 11:38:45 -0500 (0:00:00.655) 0:00:20.421 *****
changed: [managed-node1] => {
"changed": true,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice dbus-broker.service sysinit.target basic.target dbus.socket polkit.service",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target network-pre.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "yes",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ipset.service ebtables.service iptables.service shutdown.target ip6tables.service",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3199631360",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectControlGroupsEx": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice sysinit.target dbus.socket",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
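This time the task reports changed: true with enabled: true and state: started, i.e. the usual enable-and-start idiom:

    - name: Enable and start firewalld service
      ansible.builtin.systemd:
        name: firewalld
        state: started
        enabled: true

The captured status snapshot still reads ActiveState: inactive / UnitFileState: disabled because the systemd module reports the unit's state as it was before the change was applied.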
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Saturday 08 February 2025 11:38:46 -0500 (0:00:01.120) 0:00:21.542 *****
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/bin/python3.12",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Saturday 08 February 2025 11:38:46 -0500 (0:00:00.042) 0:00:21.584 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Saturday 08 February 2025 11:38:46 -0500 (0:00:00.046) 0:00:21.631 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Saturday 08 February 2025 11:38:46 -0500 (0:00:00.033) 0:00:21.664 *****
changed: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
changed: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
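The two loop items show exactly how the firewall role was invoked from the podman role, i.e. a variable of the form:

    firewall:
      - port: 8000/tcp
        state: enabled
      - port: 9000/tcp
        state: enabled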
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Saturday 08 February 2025 11:38:47 -0500 (0:00:01.107) 0:00:22.772 *****
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 08 February 2025 11:38:47 -0500 (0:00:00.045) 0:00:22.818 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 08 February 2025 11:38:47 -0500 (0:00:00.029) 0:00:22.847 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 08 February 2025 11:38:47 -0500 (0:00:00.030) 0:00:22.878 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 08 February 2025 11:38:47 -0500 (0:00:00.028) 0:00:22.906 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 08 February 2025 11:38:47 -0500 (0:00:00.058) 0:00:22.965 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 08 February 2025 11:38:47 -0500 (0:00:00.028) 0:00:22.993 *****
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.043) 0:00:23.037 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.028) 0:00:23.065 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.028) 0:00:23.093 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.025) 0:00:23.119 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.024) 0:00:23.143 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.129) 0:00:23.273 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.032) 0:00:23.305 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.051) 0:00:23.357 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.040) 0:00:23.398 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.047) 0:00:23.445 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.065) 0:00:23.511 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.047) 0:00:23.558 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.052) 0:00:23.611 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.045) 0:00:23.656 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.040) 0:00:23.697 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.051) 0:00:23.749 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.092) 0:00:23.841 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.049) 0:00:23.890 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.049) 0:00:23.939 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Saturday 08 February 2025 11:38:48 -0500 (0:00:00.045) 0:00:23.985 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.058) 0:00:24.044 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.090) 0:00:24.134 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.044) 0:00:24.179 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.046) 0:00:24.225 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
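Linger only matters for rootless users, so every task in this block skips for root. When it does run, enabling linger is conventionally a loginctl call; a sketch, where the creates guard for idempotence is an assumption:

    - name: Enable linger if needed
      ansible.builtin.command: loginctl enable-linger {{ __podman_user }}
      args:
        creates: /var/lib/systemd/linger/{{ __podman_user }}
      when: __podman_rootless | bool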
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.044) 0:00:24.270 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.044) 0:00:24.315 *****
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)

changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
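The actual secret payload is hidden by no_log. Managing a secret like this is typically a containers.podman.podman_secret call; a hedged sketch with hypothetical names:

    - name: Manage each secret
      containers.podman.podman_secret:
        name: my-app-secret           # hypothetical secret name
        data: "{{ __secret_value }}"  # hypothetical variable
        state: present
      no_log: true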
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.663) 0:00:24.978 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 08 February 2025 11:38:49 -0500 (0:00:00.034) 0:00:25.012 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.052) 0:00:25.065 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.030) 0:00:25.095 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.032) 0:00:25.128 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.038) 0:00:25.167 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.029) 0:00:25.196 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.028) 0:00:25.224 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.028) 0:00:25.252 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.027) 0:00:25.280 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.059) 0:00:25.339 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.029) 0:00:25.369 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.029) 0:00:25.398 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.028) 0:00:25.426 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.029) 0:00:25.455 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
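These two facts follow directly from the resolved user: root (UID 0) disables rootless mode, and the XDG runtime directory follows the /run/user/<uid> convention. A minimal shell sketch of the same derivation (assumes a POSIX shell on the managed node):

  $ id -u root
  0
  $ echo "/run/user/$(id -u root)"
  /run/user/0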
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.035) 0:00:25.491 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.053) 0:00:25.544 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.026) 0:00:25.571 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.026) 0:00:25.597 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.037) 0:00:25.634 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 08 February 2025 11:38:50 -0500 (0:00:00.032) 0:00:25.666 *****
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
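The secret payloads are censored by no_log, but the operation behind this task is podman's secret store. A hedged sketch of the equivalent manual step; the secret name and the environment variable are hypothetical, not recovered from the censored output:

  # hypothetical names/values; the real ones are hidden above by no_log
  $ printf '%s' "$MYSQL_ROOT_PASSWORD" | podman secret create mysql-root-password -
  $ podman secret ls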
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.521) 0:00:26.188 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.033) 0:00:26.221 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.073) 0:00:26.295 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.048) 0:00:26.343 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.035) 0:00:26.379 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.049) 0:00:26.428 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.033) 0:00:26.462 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.030) 0:00:26.492 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.027) 0:00:26.520 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.061) 0:00:26.582 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.029) 0:00:26.611 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.033) 0:00:26.645 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.040) 0:00:26.686 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.048) 0:00:26.734 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.062) 0:00:26.796 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.044) 0:00:26.841 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.078) 0:00:26.919 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.043) 0:00:26.963 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:38:51 -0500 (0:00:00.044) 0:00:27.008 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.043) 0:00:27.051 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.043) 0:00:27.095 *****
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.504) 0:00:27.599 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.027) 0:00:27.626 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.140) 0:00:27.767 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
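__podman_quadlet_str above carries the full payload of quadlet-demo.network; with the escapes resolved, the file the role is about to install reads:

  [Network]
  Subnet=192.168.30.0/24
  Gateway=192.168.30.1
  Label=app=wordpress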
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.052) 0:00:27.819 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.048) 0:00:27.868 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:38:52 -0500 (0:00:00.047) 0:00:27.916 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.121) 0:00:28.037 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.097) 0:00:28.135 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.055) 0:00:28.190 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.057) 0:00:28.248 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.065) 0:00:28.313 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
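The stat result confirms /usr/bin/getsubids exists, so the next two tasks would normally query subordinate ID ranges with it; here they are skipped because __podman_user is root. Roughly what those checks run for a non-root user (the user name is hypothetical, and the output shown is the usual shadow-utils format):

  # list subuid ranges; -g lists subgid ranges instead
  $ getsubids someuser
  0: someuser 100000 65536
  $ getsubids -g someuser
  0: someuser 100000 65536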
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.395) 0:00:28.709 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.033) 0:00:28.742 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.035) 0:00:28.778 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.048) 0:00:28.826 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.049) 0:00:28.876 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.051) 0:00:28.928 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:38:53 -0500 (0:00:00.049) 0:00:28.977 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.051) 0:00:29.028 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.049) 0:00:29.078 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
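Note the naming convention visible in these facts: a quadlet file NAME.TYPE yields a unit called NAME-TYPE.service for network and volume quadlets, so quadlet-demo.network becomes quadlet-demo-network.service (and quadlet-demo-mysql.volume later becomes quadlet-demo-mysql-volume.service). Once the file is installed and systemd reloaded, the generated unit can be inspected with a sketch like:

  $ systemctl cat quadlet-demo-network.service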
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.083) 0:00:29.162 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.051) 0:00:29.213 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.050) 0:00:29.264 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.123) 0:00:29.387 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.046) 0:00:29.434 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.030) 0:00:29.464 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.151) 0:00:29.616 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.057) 0:00:29.673 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.031) 0:00:29.705 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.027) 0:00:29.733 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
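All of the linger tasks are gated on __podman_rootless, so they are no-ops for root. For a rootless user the point is to keep that user's systemd instance alive across logouts; the underlying mechanism is systemd lingering, roughly (user name hypothetical):

  $ loginctl enable-linger someuser
  $ loginctl show-user someuser --property=Linger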
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.028) 0:00:29.761 *****
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.025) 0:00:29.787 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 08 February 2025 11:38:54 -0500 (0:00:00.028) 0:00:29.815 *****
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 08 February 2025 11:38:55 -0500 (0:00:00.390) 0:00:30.206 *****
changed: [managed-node1] => {
"changed": true,
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"dest": "/etc/containers/systemd/quadlet-demo.network",
"gid": 0,
"group": "root",
"md5sum": "061f3cf318cbd8ab5794bb1173831fb8",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 74,
"src": "/root/.ansible/tmp/ansible-tmp-1739032735.236063-21095-81303015722943/.source.network",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 08 February 2025 11:38:56 -0500 (0:00:00.858) 0:00:31.064 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 08 February 2025 11:38:56 -0500 (0:00:00.049) 0:00:31.114 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 08 February 2025 11:38:56 -0500 (0:00:00.049) 0:00:31.163 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
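"name": null with an empty status is the shape of a daemon-reload-only call to the systemd module. The reload matters because it re-runs systemd's generators, which is what converts the freshly copied quadlet file into a runnable unit (the FragmentPath under /run/systemd/generator/ in the next task's output is the generated result). The equivalent manual step:

  $ systemctl daemon-reload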
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 08 February 2025 11:38:56 -0500 (0:00:00.779) 0:00:31.942 *****
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-network.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target network-online.target sysinit.target systemd-journald.socket system.slice -.mount",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3149176832",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "system.slice -.mount sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.network",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
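The ExecStart recorded above shows exactly what the generated oneshot unit runs, so starting quadlet-demo-network.service is equivalent to the command below; RemainAfterExit=yes then keeps the unit active once the command exits. A verification sketch:

  $ /usr/bin/podman network create --ignore --subnet 192.168.30.0/24 \
      --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo
  $ podman network inspect systemd-quadlet-demo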
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 08 February 2025 11:38:57 -0500 (0:00:00.635) 0:00:32.578 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:38:57 -0500 (0:00:00.054) 0:00:32.632 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
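This quadlet is as small as they get: a bare [Volume] section with no keys, telling quadlet to create a named volume with default settings. By the same systemd- prefix convention the network above used, the resulting volume is presumably named systemd-quadlet-demo-mysql (hedged; the name does not appear at this point in the log). The file to be installed, with escapes resolved:

  [Volume]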
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:38:57 -0500 (0:00:00.070) 0:00:32.702 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:38:57 -0500 (0:00:00.059) 0:00:32.762 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:38:57 -0500 (0:00:00.056) 0:00:32.818 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:38:57 -0500 (0:00:00.121) 0:00:32.940 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.227) 0:00:33.168 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.067) 0:00:33.235 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.065) 0:00:33.300 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.077) 0:00:33.377 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.412) 0:00:33.789 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.034) 0:00:33.824 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.037) 0:00:33.862 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.035) 0:00:33.897 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.041) 0:00:33.939 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.030) 0:00:33.969 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:38:58 -0500 (0:00:00.034) 0:00:34.004 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.048) 0:00:34.053 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.053) 0:00:34.107 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.094) 0:00:34.202 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.061) 0:00:34.263 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.050) 0:00:34.313 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.144) 0:00:34.458 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.061) 0:00:34.519 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.048) 0:00:34.568 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.124) 0:00:34.692 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:38:59 -0500 (0:00:00.265) 0:00:34.957 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:39:00 -0500 (0:00:00.062) 0:00:35.020 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:39:00 -0500 (0:00:00.048) 0:00:35.068 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 08 February 2025 11:39:00 -0500 (0:00:00.049) 0:00:35.118 *****
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 08 February 2025 11:39:00 -0500 (0:00:00.045) 0:00:35.163 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 08 February 2025 11:39:00 -0500 (0:00:00.046) 0:00:35.210 *****
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 34,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:00 -0500 (0:00:00.450) 0:00:35.660 *****
changed: [managed-node1] => {
"changed": true,
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"gid": 0,
"group": "root",
"md5sum": "5ddd03a022aeb4502d9bc8ce436b4233",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 9,
"src": "/root/.ansible/tmp/ansible-tmp-1739032740.7139578-21333-262300184216330/.source.volume",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 08 February 2025 11:39:01 -0500 (0:00:00.759) 0:00:36.419 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 08 February 2025 11:39:01 -0500 (0:00:00.065) 0:00:36.485 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 08 February 2025 11:39:01 -0500 (0:00:00.060) 0:00:36.546 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 08 February 2025 11:39:02 -0500 (0:00:00.772) 0:00:37.318 *****
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql-volume.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "systemd-journald.socket system.slice -.mount sysinit.target network-online.target basic.target",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3158646784",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "sysinit.target -.mount system.slice",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
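Note: quadlet-demo-mysql-volume.service is a generated oneshot unit (Type=oneshot, RemainAfterExit=yes), so "started" here means systemd ran its ExecStart command, /usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql, exactly once; the status snapshot above was captured before the start completed. To verify the result on the managed node one could run, for example (not part of this playbook run):

    podman volume ls --filter name=systemd-quadlet-demo-mysql   # the volume the unit creates
    systemctl status quadlet-demo-mysql-volume.service          # expected: active (exited)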
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 08 February 2025 11:39:02 -0500 (0:00:00.636) 0:00:37.954 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.061) 0:00:38.016 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
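Unescaped, the __podman_quadlet_str fact above is the unit text the role renders from quadlet-demo-mysql.container.j2 and later installs as /etc/containers/systemd/quadlet-demo-mysql.container:

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill

The quadlet systemd generator later translates these [Container] keys into the quadlet-demo-mysql.service unit started further below.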
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.107) 0:00:38.123 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.045) 0:00:38.169 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.034) 0:00:38.204 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.045) 0:00:38.249 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.055) 0:00:38.304 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.032) 0:00:38.336 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.030) 0:00:38.367 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.096) 0:00:38.463 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.422) 0:00:38.886 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.058) 0:00:38.945 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:39:03 -0500 (0:00:00.054) 0:00:38.999 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.058) 0:00:39.058 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.051) 0:00:39.110 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.052) 0:00:39.163 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.054) 0:00:39.217 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.055) 0:00:39.272 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.047) 0:00:39.320 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.086) 0:00:39.406 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.048) 0:00:39.455 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.044) 0:00:39.500 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.096) 0:00:39.596 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.063) 0:00:39.660 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.047) 0:00:39.708 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.077) 0:00:39.786 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.050) 0:00:39.836 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.027) 0:00:39.864 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.027) 0:00:39.891 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 08 February 2025 11:39:04 -0500 (0:00:00.061) 0:00:39.952 *****
changed: [managed-node1] => (item=/tmp/quadlet_demo) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/quadlet_demo",
"mode": "0777",
"owner": "root",
"path": "/tmp/quadlet_demo",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 08 February 2025 11:39:05 -0500 (0:00:00.381) 0:00:40.334 *****
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
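Note: the censored loop above is the image pull step: __podman_images resolves to quay.io/linux-system-roles/mysql:5.6 (see "Set per-container variables part 5" above) and __podman_pull_image is true, so the role ensured that image is present, roughly the CLI equivalent of:

    podman pull quay.io/linux-system-roles/mysql:5.6   # rough equivalent; the role's actual module invocation is hidden

The result is censored because the task runs with no_log: true, presumably so registry credentials cannot leak into logs.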
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 08 February 2025 11:39:11 -0500 (0:00:06.256) 0:00:46.591 *****
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 67,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:11 -0500 (0:00:00.381) 0:00:46.973 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 08 February 2025 11:39:11 -0500 (0:00:00.031) 0:00:47.004 *****
changed: [managed-node1] => {
"changed": true,
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.container",
"gid": 0,
"group": "root",
"md5sum": "341b473056d2a5dfa35970b0d2e23a5d",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 363,
"src": "/root/.ansible/tmp/ansible-tmp-1739032752.035067-21647-78977375058148/.source.container",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 08 February 2025 11:39:12 -0500 (0:00:00.690) 0:00:47.695 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 08 February 2025 11:39:12 -0500 (0:00:00.029) 0:00:47.725 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 08 February 2025 11:39:13 -0500 (0:00:00.737) 0:00:48.462 *****
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice sysinit.target systemd-journald.socket basic.target -.mount tmp.mount quadlet-demo-mysql-volume.service quadlet-demo-network.service network-online.target",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "multi-user.target shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuset io memory pids",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3060355072",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "continue",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "-.mount sysinit.target quadlet-demo-network.service quadlet-demo-mysql-volume.service system.slice",
"RequiresMountsFor": "/run/containers /tmp/quadlet_demo",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
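Note: FragmentPath=/run/systemd/generator/quadlet-demo-mysql.service together with SourcePath=/etc/containers/systemd/quadlet-demo-mysql.container shows this unit was synthesized by the quadlet generator from the .container file installed above; the long ExecStart is the podman run command derived from its [Container] keys. To inspect the generated unit on the node, for example:

    systemctl cat quadlet-demo-mysql.service   # prints the generated unit file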
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.902) 0:00:49.364 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.050) 0:00:49.415 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
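Note: envoy-proxy-configmap.yml is a plain Kubernetes ConfigMap manifest holding the Envoy bootstrap config: an admin listener on port 9901 and a TLS listener on 8080 that routes all HTTP traffic to a static "backend" cluster at 127.0.0.1:80, with certificate and key read from /etc/envoy-certificates. Because its quadlet type resolves to "yml" rather than "container" (part 2 below), quadlet generates no service for this file; it is only copied into /etc/containers/systemd so a .kube unit can reference it (e.g. via a [Kube] ConfigMap= key), which is why __podman_service_name comes back empty and the Start/Restart service tasks further below are skipped.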
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.053) 0:00:49.469 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.044) 0:00:49.514 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.036) 0:00:49.551 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.048) 0:00:49.599 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.055) 0:00:49.654 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.034) 0:00:49.689 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.057) 0:00:49.746 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:39:14 -0500 (0:00:00.073) 0:00:49.820 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:39:15 -0500 (0:00:00.535) 0:00:50.356 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:39:15 -0500 (0:00:00.094) 0:00:50.451 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:39:15 -0500 (0:00:00.156) 0:00:50.607 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:39:15 -0500 (0:00:00.101) 0:00:50.709 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:39:15 -0500 (0:00:00.099) 0:00:50.808 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:39:15 -0500 (0:00:00.093) 0:00:50.902 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:39:15 -0500 (0:00:00.082) 0:00:50.984 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.100) 0:00:51.085 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.104) 0:00:51.189 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.145) 0:00:51.335 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.096) 0:00:51.431 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.055) 0:00:51.487 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.160) 0:00:51.647 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.095) 0:00:51.743 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.062) 0:00:51.806 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 08 February 2025 11:39:16 -0500 (0:00:00.139) 0:00:51.946 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:39:17 -0500 (0:00:00.086) 0:00:52.032 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:39:17 -0500 (0:00:00.045) 0:00:52.077 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:39:17 -0500 (0:00:00.047) 0:00:52.125 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 08 February 2025 11:39:17 -0500 (0:00:00.051) 0:00:52.176 *****
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 08 February 2025 11:39:17 -0500 (0:00:00.077) 0:00:52.254 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 08 February 2025 11:39:17 -0500 (0:00:00.095) 0:00:52.349 *****
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 103,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:17 -0500 (0:00:00.550) 0:00:52.900 *****
changed: [managed-node1] => {
"changed": true,
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"dest": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"gid": 0,
"group": "root",
"md5sum": "aec75d972c231aac004e1338934544cf",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 2102,
"src": "/root/.ansible/tmp/ansible-tmp-1739032757.942965-21894-226207580527898/.source.yml",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 08 February 2025 11:39:18 -0500 (0:00:00.722) 0:00:53.622 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 08 February 2025 11:39:18 -0500 (0:00:00.049) 0:00:53.672 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 08 February 2025 11:39:18 -0500 (0:00:00.046) 0:00:53.719 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 08 February 2025 11:39:19 -0500 (0:00:00.985) 0:00:54.704 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 08 February 2025 11:39:19 -0500 (0:00:00.033) 0:00:54.738 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:39:19 -0500 (0:00:00.034) 0:00:54.773 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
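Note: the facts above show that this quadlet spec was rendered from a Jinja2 template (quadlet-demo.yml.j2) rather than copied from a static file, which is why __podman_quadlet_file_src is empty while __podman_quadlet_str already contains the rendered Kubernetes YAML. A minimal role invocation that would exercise this code path could look like the sketch below; apart from the podman_quadlet_specs and template_src keys, which the log itself references, the play is an illustrative assumption, not something taken from this run:

    - name: Deploy a templated quadlet (reproduction sketch)
      hosts: managed-node1
      roles:
        - role: fedora.linux_system_roles.podman
          vars:
            podman_quadlet_specs:
              # template_src is rendered on the controller and written to
              # /etc/containers/systemd/quadlet-demo.yml on the managed node
              - template_src: quadlet-demo.yml.j2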
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:39:19 -0500 (0:00:00.080) 0:00:54.853 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:39:19 -0500 (0:00:00.041) 0:00:54.894 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:19 -0500 (0:00:00.046) 0:00:54.940 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.077) 0:00:55.018 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.094) 0:00:55.112 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.054) 0:00:55.166 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.038) 0:00:55.204 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.046) 0:00:55.251 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.554) 0:00:55.805 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.032) 0:00:55.837 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.030) 0:00:55.868 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.031) 0:00:55.899 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.034) 0:00:55.934 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:39:20 -0500 (0:00:00.077) 0:00:56.011 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.036) 0:00:56.048 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.035) 0:00:56.083 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.030) 0:00:56.113 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.050) 0:00:56.164 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.031) 0:00:56.195 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.031) 0:00:56.227 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.117) 0:00:56.344 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.042) 0:00:56.386 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.034) 0:00:56.421 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.078) 0:00:56.500 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.056) 0:00:56.556 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.027) 0:00:56.583 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.028) 0:00:56.612 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.028) 0:00:56.640 *****
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.024) 0:00:56.665 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 08 February 2025 11:39:21 -0500 (0:00:00.027) 0:00:56.693 *****
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 136,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:22 -0500 (0:00:00.375) 0:00:57.068 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 08 February 2025 11:39:22 -0500 (0:00:00.036) 0:00:57.105 *****
changed: [managed-node1] => {
"changed": true,
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"dest": "/etc/containers/systemd/quadlet-demo.yml",
"gid": 0,
"group": "root",
"md5sum": "fd890594adfc24339cb9cdc5e7b19a66",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 1605,
"src": "/root/.ansible/tmp/ansible-tmp-1739032762.141144-22084-275772725018437/.source.yml",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 08 February 2025 11:39:22 -0500 (0:00:00.673) 0:00:57.778 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 08 February 2025 11:39:22 -0500 (0:00:00.065) 0:00:57.844 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.760) 0:00:58.604 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.039) 0:00:58.644 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.038) 0:00:58.682 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
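Unescaped for readability, the __podman_quadlet_str fact above is the following quadlet unit, which podman's systemd generator turns into quadlet-demo.service at daemon-reload time:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml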
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.052) 0:00:58.734 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.036) 0:00:58.771 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.030) 0:00:58.802 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.043) 0:00:58.846 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.056) 0:00:58.902 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.035) 0:00:58.938 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:39:23 -0500 (0:00:00.056) 0:00:58.994 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.051) 0:00:59.046 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.392) 0:00:59.439 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.037) 0:00:59.476 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.036) 0:00:59.513 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.034) 0:00:59.547 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.030) 0:00:59.577 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.029) 0:00:59.607 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.029) 0:00:59.636 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.030) 0:00:59.667 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.029) 0:00:59.697 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.174) 0:00:59.871 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:39:24 -0500 (0:00:00.052) 0:00:59.924 *****
ok: [managed-node1] => {
"changed": false,
"content": "LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK",
"encoding": "base64",
"source": "/etc/containers/systemd/quadlet-demo.yml"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.404) 0:01:00.329 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/wordpress:4.8-apache",
"quay.io/linux-system-roles/envoyproxy:v1.25.0"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": [
"/tmp/httpd3",
"/tmp/httpd3-create"
]
},
"changed": false
}
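These lists are derived from the kube YAML fetched in the previous task: __podman_images collects every container image value, and __podman_volumes collects each hostPath path so the role can pre-create those directories. Decoded from the base64 content above, the relevant fragment of quadlet-demo.yml is:

    containers:
    - name: wordpress
      image: quay.io/linux-system-roles/wordpress:4.8-apache
      # ...
    - name: envoy
      image: quay.io/linux-system-roles/envoyproxy:v1.25.0
      # ...
    volumes:
    # ...
    - name: www  # not used - for testing hostpath
      hostPath:
        path: /tmp/httpd3
    - name: create  # not used - for testing hostpath
      hostPath:
        path: /tmp/httpd3-create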
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.151) 0:01:00.480 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.061) 0:01:00.542 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.045) 0:01:00.587 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.081) 0:01:00.668 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.061) 0:01:00.730 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.031) 0:01:00.762 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.029) 0:01:00.791 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 08 February 2025 11:39:25 -0500 (0:00:00.027) 0:01:00.819 *****
changed: [managed-node1] => (item=/tmp/httpd3) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
changed: [managed-node1] => (item=/tmp/httpd3-create) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3-create",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3-create",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 08 February 2025 11:39:26 -0500 (0:00:00.737) 0:01:01.556 *****
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
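The two censored loop items here are the wordpress and envoyproxy images listed in __podman_images earlier; pulling them is what makes this task take roughly 16 seconds. The role's actual task is hidden by no_log, so the following manual pre-pull with containers.podman.podman_image is a sketch of the mechanism, not a copy of it:

    - name: Pre-pull the demo images (sketch, equivalent effect only)
      containers.podman.podman_image:
        name: "{{ item }}"
      loop:
        - quay.io/linux-system-roles/wordpress:4.8-apache
        - quay.io/linux-system-roles/envoyproxy:v1.25.0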
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 08 February 2025 11:39:42 -0500 (0:00:15.910) 0:01:17.467 *****
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 160,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 08 February 2025 11:39:42 -0500 (0:00:00.403) 0:01:17.871 *****
changed: [managed-node1] => {
"changed": true,
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"dest": "/etc/containers/systemd/quadlet-demo.kube",
"gid": 0,
"group": "root",
"md5sum": "da53c88f92b68b0487aa209f795b6bb3",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 456,
"src": "/root/.ansible/tmp/ansible-tmp-1739032782.9002678-22772-261467899926975/.source.kube",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 08 February 2025 11:39:43 -0500 (0:00:00.675) 0:01:18.546 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 08 February 2025 11:39:43 -0500 (0:00:00.034) 0:01:18.581 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 08 February 2025 11:39:43 -0500 (0:00:00.047) 0:01:18.628 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 08 February 2025 11:39:44 -0500 (0:00:00.801) 0:01:19.430 *****
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "sysinit.target quadlet-demo-mysql.service -.mount network-online.target systemd-journald.socket quadlet-demo-network.service basic.target system.slice",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "multi-user.target shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2484596736",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "quadlet-demo-mysql.service quadlet-demo-network.service system.slice sysinit.target -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
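FragmentPath and SourcePath above show the quadlet flow end to end: the systemd generator compiled /etc/containers/systemd/quadlet-demo.kube into the transient unit /run/systemd/generator/quadlet-demo.service. Reformatted from the ExecStart property in the dump, the generated command maps each [Kube] key onto a podman kube play option (Network= to --network with the systemd- prefix, PublishPort= to --publish, ConfigMap= to --configmap, and Yaml= to the positional argument):

    /usr/bin/podman kube play --replace --service-container=true \
        --network systemd-quadlet-demo \
        --configmap /etc/containers/systemd/envoy-proxy-configmap.yml \
        --publish 8000:8080 --publish 9000:9901 \
        /etc/containers/systemd/quadlet-demo.yml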
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 08 February 2025 11:39:45 -0500 (0:00:01.172) 0:01:20.602 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Saturday 08 February 2025 11:39:45 -0500 (0:00:00.081) 0:01:20.683 *****
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Saturday 08 February 2025 11:39:45 -0500 (0:00:00.037) 0:01:20.721 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Saturday 08 February 2025 11:39:45 -0500 (0:00:00.029) 0:01:20.751 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [Check quadlet files] *****************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96
Saturday 08 February 2025 11:39:45 -0500 (0:00:00.041) 0:01:20.792 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/containers/systemd"
],
"delta": "0:00:00.007562",
"end": "2025-02-08 11:39:46.143002",
"rc": 0,
"start": "2025-02-08 11:39:46.135440"
}
STDOUT:
total 24
drwxr-xr-x. 9 root root  178 Feb  8 11:35 ../
-rw-r--r--. 1 root root   74 Feb  8 11:38 quadlet-demo.network
-rw-r--r--. 1 root root    9 Feb  8 11:39 quadlet-demo-mysql.volume
-rw-r--r--. 1 root root  363 Feb  8 11:39 quadlet-demo-mysql.container
-rw-r--r--. 1 root root 2102 Feb  8 11:39 envoy-proxy-configmap.yml
-rw-r--r--. 1 root root 1605 Feb  8 11:39 quadlet-demo.yml
-rw-r--r--. 1 root root  456 Feb  8 11:39 quadlet-demo.kube
drwxr-xr-x. 2 root root  185 Feb  8 11:39 ./
TASK [Check containers] ********************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100
Saturday 08 February 2025 11:39:46 -0500 (0:00:00.438) 0:01:21.231 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.046109",
"end": "2025-02-08 11:39:46.615359",
"failed_when_result": false,
"rc": 0,
"start": "2025-02-08 11:39:46.569250"
}
STDOUT:
CONTAINER ID  IMAGE                                            COMMAND               CREATED         STATUS                   PORTS                                                      NAMES
93148221a3fd  quay.io/libpod/registry:2.8.2                    /etc/docker/regis...  5 minutes ago   Up 5 minutes             127.0.0.1:5000->5000/tcp                                   podman_registry
05159afa0f59  quay.io/linux-system-roles/mysql:5.6             mysqld                32 seconds ago  Up 32 seconds (healthy)  3306/tcp                                                   quadlet-demo-mysql
ba15754f1ee0  localhost/podman-pause:5.3.1-1733097600                                1 second ago    Up 1 second                                                                         a96f3a51b8d1-service
d3d31a88915c  localhost/podman-pause:5.3.1-1733097600                                1 second ago    Up 1 second              0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp             5bb5b766f277-infra
b2c27c149598  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  1 second ago    Up 1 second              0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp     quadlet-demo-wordpress
1fc390fa3ff4  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  1 second ago    Up 1 second              0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp  quadlet-demo-envoy
TASK [Check volumes] ***********************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105
Saturday 08 February 2025 11:39:46 -0500 (0:00:00.480) 0:01:21.711 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls"
],
"delta": "0:00:00.029382",
"end": "2025-02-08 11:39:47.058521",
"failed_when_result": false,
"rc": 0,
"start": "2025-02-08 11:39:47.029139"
}
STDOUT:
DRIVER VOLUME NAME
local 2ce71b63066ed4402ab34684197d9d1868b43cd94dbf701d92e842ee487e7893
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
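Only systemd-quadlet-demo-mysql comes from the .volume unit; wp-pv-claim and the two envoy-* volumes are created by podman kube play from the PersistentVolumeClaim and ConfigMap objects referenced by the .kube unit. A plausible reconstruction of such a unit, again expressed as a role spec, is below; the PublishPort lines match the 8000->8080 and 9000->9901 mappings seen above, and the rest is assumed rather than the demo's verbatim file:
podman_quadlet_specs:
  # Plausible sketch of the .kube unit, not the demo's verbatim contents.
  - name: quadlet-demo
    type: kube
    file_content: |
      [Kube]
      Yaml=quadlet-demo.yml
      ConfigMap=envoy-proxy-configmap.yml
      PublishPort=8000:8080
      PublishPort=9000:9901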
TASK [Check pods] **************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110
Saturday 08 February 2025 11:39:47 -0500 (0:00:00.425) 0:01:22.136 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.035880",
"end": "2025-02-08 11:39:47.446481",
"failed_when_result": false,
"rc": 0,
"start": "2025-02-08 11:39:47.410601"
}
STDOUT:
POD ID        NAME          STATUS   CREATED        INFRA ID      IDS                                     NAMES                                                          STATUS
5bb5b766f277  quadlet-demo  Running  2 seconds ago  d3d31a88915c  d3d31a88915c,b2c27c149598,1fc390fa3ff4  5bb5b766f277-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115
Saturday 08 February 2025 11:39:47 -0500 (0:00:00.390) 0:01:22.527 *****
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units | grep quadlet",
"delta": "0:00:00.014382",
"end": "2025-02-08 11:39:47.818330",
"failed_when_result": false,
"rc": 0,
"start": "2025-02-08 11:39:47.803948"
}
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
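A bare systemctl | grep pipeline would hide a grep failure behind systemctl's exit status, so the task runs under set -euo pipefail; "no quadlet units found" then surfaces as a nonzero rc, and failed_when keeps that from aborting the test. A sketch of the task, with the register name and bash executable as assumptions:
- name: Check systemd
  ansible.builtin.shell: set -euo pipefail; systemctl list-units | grep quadlet
  args:
    executable: /bin/bash  # assumption: pipefail is a bash feature, not POSIX sh
  register: __systemd_units  # hypothetical register name
  changed_when: false
  failed_when: false  # matches the failed_when_result field above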
TASK [Check web] ***************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
Saturday 08 February 2025 11:39:47 -0500 (0:00:00.370) 0:01:22.897 *****
changed: [managed-node1] => {
"attempts": 1,
"changed": true,
"checksum_dest": null,
"checksum_src": "d1ac587ee4653b36ed40791b2bca2a83cf8cb157",
"dest": "/run/out",
"elapsed": 0,
"gid": 0,
"group": "root",
"md5sum": "95e8238992037c7b6b6decebba46e982",
"mode": "0600",
"owner": "root",
"secontext": "system_u:object_r:var_run_t:s0",
"size": 11666,
"src": "/root/.ansible/tmp/ansible-tmp-1739032787.9224231-22937-212514716150176/tmp799n6oij",
"state": "file",
"status_code": 200,
"uid": 0,
"url": "https://localhost:8000"
}
MSG:
OK (unknown bytes)
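The Check web task pulls https://localhost:8000 (the port envoy publishes) and stores the body at /run/out; the url, dest, mode, and attempt count are all visible above, and the result is registered as __web_status for the Error task below. A minimal sketch, with the certificate and retry handling as assumptions:
- name: Check web
  ansible.builtin.get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false  # assumption: the demo serves a self-signed certificate
  register: __web_status
  until: __web_status is success  # assumption: the "attempts" field implies a retry loop
  retries: 3  # hypothetical values
  delay: 5
  ignore_errors: true  # assumption: lets the Error task below inspect a failure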
TASK [Show web] ****************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:132
Saturday 08 February 2025 11:39:49 -0500 (0:00:01.242) 0:01:24.140 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"cat",
"/run/out"
],
"delta": "0:00:00.002753",
"end": "2025-02-08 11:39:49.428631",
"rc": 0,
"start": "2025-02-08 11:39:49.425878"
}
STDOUT:
WordPress › Installation
WordPress
TASK [Error] *******************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:137
Saturday 08 February 2025 11:39:49 -0500 (0:00:00.369) 0:01:24.510 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__web_status is failed",
"skip_reason": "Conditional result was False"
}
TASK [Check] *******************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
Saturday 08 February 2025 11:39:49 -0500 (0:00:00.029) 0:01:24.539 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.037076",
"end": "2025-02-08 11:39:49.855103",
"rc": 0,
"start": "2025-02-08 11:39:49.818027"
}
STDOUT:
CONTAINER ID  IMAGE                                            COMMAND               CREATED         STATUS                   PORTS                                                       NAMES
93148221a3fd  quay.io/libpod/registry:2.8.2                    /etc/docker/regis...  5 minutes ago   Up 5 minutes             127.0.0.1:5000->5000/tcp                                    podman_registry
05159afa0f59  quay.io/linux-system-roles/mysql:5.6             mysqld                35 seconds ago  Up 35 seconds (healthy)  3306/tcp                                                    quadlet-demo-mysql
ba15754f1ee0  localhost/podman-pause:5.3.1-1733097600                                4 seconds ago   Up 4 seconds                                                                         a96f3a51b8d1-service
d3d31a88915c  localhost/podman-pause:5.3.1-1733097600                                4 seconds ago   Up 4 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp              5bb5b766f277-infra
b2c27c149598  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  4 seconds ago   Up 4 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp     quadlet-demo-wordpress
1fc390fa3ff4  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  4 seconds ago   Up 4 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp  quadlet-demo-envoy
TASK [Check pods] **************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152
Saturday 08 February 2025 11:39:49 -0500 (0:00:00.395) 0:01:24.935 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.034789",
"end": "2025-02-08 11:39:50.248641",
"failed_when_result": false,
"rc": 0,
"start": "2025-02-08 11:39:50.213852"
}
STDOUT:
POD ID        NAME          STATUS   CREATED        INFRA ID      IDS                                     NAMES                                                          STATUS
5bb5b766f277  quadlet-demo  Running  5 seconds ago  d3d31a88915c  d3d31a88915c,b2c27c149598,1fc390fa3ff4  5bb5b766f277-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157
Saturday 08 February 2025 11:39:50 -0500 (0:00:00.393) 0:01:25.329 *****
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet",
"delta": "0:00:00.014299",
"end": "2025-02-08 11:39:50.633847",
"failed_when_result": false,
"rc": 0,
"start": "2025-02-08 11:39:50.619548"
}
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
TASK [LS] **********************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165
Saturday 08 February 2025 11:39:50 -0500 (0:00:00.384) 0:01:25.713 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/systemd/system"
],
"delta": "0:00:00.004248",
"end": "2025-02-08 11:39:51.036468",
"failed_when_result": false,
"rc": 0,
"start": "2025-02-08 11:39:51.032220"
}
STDOUT:
total 12
drwxr-xr-x. 5 root root 47 Feb 4 09:36 ../
drwxr-xr-x. 2 root root 32 Feb 4 09:36 getty.target.wants/
lrwxrwxrwx. 1 root root 43 Feb 4 09:36 dbus.service -> /usr/lib/systemd/system/dbus-broker.service
lrwxrwxrwx. 1 root root 37 Feb 4 09:36 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
lrwxrwxrwx. 1 root root 57 Feb 4 09:36 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x. 2 root root 48 Feb 4 09:36 network-online.target.wants/
drwxr-xr-x. 2 root root 76 Feb 4 09:36 timers.target.wants/
drwxr-xr-x. 2 root root 38 Feb 4 09:37 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
lrwxrwxrwx. 1 root root 41 Feb 4 09:39 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x. 2 root root 31 Feb 4 09:51 remote-fs.target.wants/
drwxr-xr-x. 2 root root 119 Feb 4 09:52 cloud-init.target.wants/
drwxr-xr-x. 2 root root 4096 Feb 4 09:53 sysinit.target.wants/
drwxr-xr-x. 2 root root 113 Feb 8 11:34 sockets.target.wants/
drwxr-xr-x. 2 root root 4096 Feb 8 11:38 multi-user.target.wants/
lrwxrwxrwx. 1 root root 41 Feb 8 11:38 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 11 root root 4096 Feb 8 11:38 ./
TASK [Cleanup] *****************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.405) 0:01:26.119 *****
included: fedora.linux_system_roles.podman for managed-node1
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.074) 0:01:26.193 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.049) 0:01:26.243 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
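This gating pattern re-runs fact gathering only when a fact the role needs is missing from ansible_facts, which keeps a second include of the role, like this Cleanup pass, from paying the full setup cost again. Roughly, with the gather_subset variable name as an assumption:
- name: Ensure ansible_facts used by role
  ansible.builtin.setup:
    gather_subset: "{{ __podman_required_facts_subsets }}"  # assumed variable name
  when: __podman_required_facts | difference(ansible_facts.keys() | list) | length > 0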
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.139) 0:01:26.382 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.052) 0:01:26.435 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.050) 0:01:26.485 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.051) 0:01:26.537 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.038) 0:01:26.575 *****
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
"ansible_facts": {
"__podman_packages": [
"iptables-nft",
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_10.yml"
}
ok: [managed-node1] => (item=CentOS_10.yml) => {
"ansible_facts": {
"__podman_packages": [
"iptables-nft",
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_10.yml"
}
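The loop above layers vars files from most generic to most specific, skipping names that do not exist as files; CentOS_10.yml is applied twice because the major-version and full-version file names both resolve to the same string on this host, so the repeat is expected rather than a logging glitch. A sketch of the pattern (the loop construction is assumed; the when test is taken from the skip above):
- name: Set platform/version specific variables
  ansible.builtin.include_vars: "{{ __vars_file }}"
  loop:
    - RedHat.yml
    - CentOS.yml
    - CentOS_10.yml  # from ansible_distribution_major_version
    - CentOS_10.yml  # ansible_distribution_version yields the same name here
  vars:
    __vars_file: "{{ role_path }}/vars/{{ item }}"
  when: __vars_file is file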
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 08 February 2025 11:39:51 -0500 (0:00:00.077) 0:01:26.653 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 08 February 2025 11:39:52 -0500 (0:00:00.911) 0:01:27.564 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 08 February 2025 11:39:52 -0500 (0:00:00.042) 0:01:27.607 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages))",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 08 February 2025 11:39:52 -0500 (0:00:00.038) 0:01:27.645 *****
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 08 February 2025 11:39:52 -0500 (0:00:00.033) 0:01:27.679 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 08 February 2025 11:39:52 -0500 (0:00:00.035) 0:01:27.714 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 08 February 2025 11:39:52 -0500 (0:00:00.031) 0:01:27.746 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.024428",
"end": "2025-02-08 11:39:53.053493",
"rc": 0,
"start": "2025-02-08 11:39:53.029065"
}
STDOUT:
podman version 5.3.1
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.411) 0:01:28.157 *****
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "5.3.1"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.043) 0:01:28.201 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
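These gates compare the podman_version fact set just above using Jinja's version test; the skip shows that 5.3.1 passes. A sketch of such a gate:
- name: Podman package version must be 4.2 or later
  ansible.builtin.fail:
    msg: podman 4.2 or later is required, found {{ podman_version }}
  when: podman_version is version("4.2", "<")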
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.033) 0:01:28.234 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.039) 0:01:28.274 *****
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
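Unlike the fail-style gates, this one uses meta: end_host, which would stop the play for this host cleanly instead of raising an error; here the conditional evaluates false and execution continues. Schematically (the exact when clause is not shown in the log, so the second condition is hypothetical):
- name: Podman package version must be 4.4 or later for quadlet, secrets
  ansible.builtin.meta: end_host
  when:
    - podman_version is version("4.4", "<")
    - __podman_uses_quadlet | d(false)  # hypothetical condition name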
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.046) 0:01:28.321 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.092) 0:01:28.414 *****
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.048) 0:01:28.463 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.058) 0:01:28.522 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.032) 0:01:28.555 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.033) 0:01:28.588 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:39:53 -0500 (0:00:00.038) 0:01:28.627 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.396) 0:01:29.024 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.035) 0:01:29.059 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
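Both getsubids checks are skipped because the podman user resolves to root, which needs no subordinate ID ranges; for a rootless user the role would verify the subuid and subgid allocations, roughly:
- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids {{ __podman_user }}
  register: __podman_subuids  # hypothetical register name
  changed_when: false
  when: __podman_user not in ["root", "0"]

- name: Check with getsubids for user subgids
  ansible.builtin.command: getsubids -g {{ __podman_user }}
  register: __podman_subgids  # hypothetical register name
  changed_when: false
  when: __podman_user not in ["root", "0"]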
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.031) 0:01:29.091 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.030) 0:01:29.122 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.030) 0:01:29.152 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.029) 0:01:29.182 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.031) 0:01:29.214 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.029) 0:01:29.243 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.030) 0:01:29.274 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.050) 0:01:29.325 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.076) 0:01:29.402 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.035) 0:01:29.437 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.035) 0:01:29.473 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.103) 0:01:29.577 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.032) 0:01:29.609 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.029) 0:01:29.638 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.057) 0:01:29.696 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.029) 0:01:29.725 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.029) 0:01:29.754 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.064) 0:01:29.819 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.057) 0:01:29.876 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.038) 0:01:29.914 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.032) 0:01:29.946 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
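All four config handlers above (containers.conf.d, registries.conf.d, storage.conf, policy.json) are no-ops in this run because their role variables are empty. Supplying one would make the role write the matching drop-in path from Set config file paths; an illustrative, hypothetical input:
# Hypothetical input; the role would render it into
# /etc/containers/registries.conf.d/50-systemroles.conf
podman_registries_conf:
  unqualified-search-registries:
    - quay.io
    - registry.fedoraproject.org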
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Saturday 08 February 2025 11:39:54 -0500 (0:00:00.034) 0:01:29.981 *****
included: fedora.linux_system_roles.firewall for managed-node1
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Saturday 08 February 2025 11:39:55 -0500 (0:00:00.115) 0:01:30.096 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 08 February 2025 11:39:55 -0500 (0:00:00.052) 0:01:30.149 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 08 February 2025 11:39:55 -0500 (0:00:00.034) 0:01:30.184 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 08 February 2025 11:39:55 -0500 (0:00:00.027) 0:01:30.211 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Saturday 08 February 2025 11:39:55 -0500 (0:00:00.064) 0:01:30.276 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Saturday 08 February 2025 11:39:55 -0500 (0:00:00.030) 0:01:30.307 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Saturday 08 February 2025 11:39:55 -0500 (0:00:00.034) 0:01:30.341 *****
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: firewalld
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Saturday 08 February 2025 11:39:56 -0500 (0:00:00.798) 0:01:31.140 *****
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Saturday 08 February 2025 11:39:56 -0500 (0:00:00.031) 0:01:31.171 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Saturday 08 February 2025 11:39:56 -0500 (0:00:00.029) 0:01:31.201 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Saturday 08 February 2025 11:39:56 -0500 (0:00:00.030) 0:01:31.231 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Saturday 08 February 2025 11:39:56 -0500 (0:00:00.027) 0:01:31.258 *****
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
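The conflicting-services loop is skipped wholesale because firewall_disable_conflicting_services defaults to false; when enabled it would stop and disable each of the three listed services, roughly:
- name: Attempt to stop and disable conflicting services
  ansible.builtin.service:
    name: "{{ item }}"
    state: stopped
    enabled: false
  loop: [nftables, iptables, ufw]
  when: firewall_disable_conflicting_services | bool
  ignore_errors: true  # assumption: an absent service should not abort the role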
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Saturday 08 February 2025 11:39:56 -0500 (0:00:00.039) 0:01:31.298 *****
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ActiveEnterTimestampMonotonic": "659621173",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "dbus-broker.service polkit.service dbus.socket system.slice sysinit.target basic.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2025-02-08 11:38:46 EST",
"AssertTimestampMonotonic": "659364419",
"Before": "network-pre.target multi-user.target shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "463787000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "yes",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ConditionTimestampMonotonic": "659364415",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "iptables.service shutdown.target ip6tables.service ebtables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlGroupId": "142113",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "22364",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ExecMainHandoffTimestampMonotonic": "659396435",
"ExecMainPID": "63495",
"ExecMainStartTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ExecMainStartTimestampMonotonic": "659367217",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2025-02-08 11:38:46 EST",
"InactiveExitTimestampMonotonic": "659367876",
"InvocationID": "b114cb06f17c41228ed9f6185c1c9f7d",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "63495",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2503135232",
"MemoryCurrent": "35016704",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "36720640",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectControlGroupsEx": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Sat 2025-02-08 11:39:44 EST",
"StateChangeTimestampMonotonic": "717514864",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Saturday 08 February 2025 11:39:56 -0500 (0:00:00.572) 0:01:31.871 *****
ok: [managed-node1] => {
"changed": false,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ActiveEnterTimestampMonotonic": "659621173",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "dbus-broker.service polkit.service dbus.socket system.slice sysinit.target basic.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2025-02-08 11:38:46 EST",
"AssertTimestampMonotonic": "659364419",
"Before": "network-pre.target multi-user.target shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "463787000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "yes",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ConditionTimestampMonotonic": "659364415",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "iptables.service shutdown.target ip6tables.service ebtables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlGroupId": "142113",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "22364",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ExecMainHandoffTimestampMonotonic": "659396435",
"ExecMainPID": "63495",
"ExecMainStartTimestamp": "Sat 2025-02-08 11:38:46 EST",
"ExecMainStartTimestampMonotonic": "659367217",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2025-02-08 11:38:46 EST",
"InactiveExitTimestampMonotonic": "659367876",
"InvocationID": "b114cb06f17c41228ed9f6185c1c9f7d",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "63495",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2502561792",
"MemoryCurrent": "35016704",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "36720640",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectControlGroupsEx": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Sat 2025-02-08 11:39:44 EST",
"StateChangeTimestampMonotonic": "717514864",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
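The JSON above is the systemd property dump for firewalld.service that Ansible's service handling returns in its "status" field. A minimal sketch of how to fetch the same data ad hoc (the task name and register variable are illustrative, not from the test playbook):

    - name: Dump firewalld unit properties              # hypothetical ad-hoc task
      ansible.builtin.command: systemctl show firewalld.service
      register: firewalld_show                          # illustrative variable name
      changed_when: false                               # read-only query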
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Saturday 08 February 2025 11:39:57 -0500 (0:00:00.575) 0:01:32.446 *****
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/bin/python3.12",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Saturday 08 February 2025 11:39:57 -0500 (0:00:00.039) 0:01:32.486 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Saturday 08 February 2025 11:39:57 -0500 (0:00:00.028) 0:01:32.515 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Saturday 08 February 2025 11:39:57 -0500 (0:00:00.029) 0:01:32.544 *****
ok: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
ok: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
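Both ports were already enabled, so the Configure firewall task reported no change for either loop item. A minimal sketch of role input matching the two items above (the variable name "firewall" is confirmed by the skip conditions logged below; the exact playbook layout is an assumption):

    firewall:                      # role input list; two items, as in the loop above
      - port: 8000/tcp             # envoy proxy data port
        state: enabled
      - port: 9000/tcp             # envoy proxy admin port
        state: enabled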
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.994) 0:01:33.539 *****
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.047) 0:01:33.587 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.031) 0:01:33.618 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.030) 0:01:33.649 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.029) 0:01:33.678 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.028) 0:01:33.707 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.034) 0:01:33.741 *****
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.115) 0:01:33.856 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.050) 0:01:33.907 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Saturday 08 February 2025 11:39:58 -0500 (0:00:00.062) 0:01:33.969 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.049) 0:01:34.019 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.040) 0:01:34.059 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.156) 0:01:34.216 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.049) 0:01:34.266 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.090) 0:01:34.356 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.058) 0:01:34.414 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.053) 0:01:34.468 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.054) 0:01:34.522 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.036) 0:01:34.559 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.037) 0:01:34.596 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.033) 0:01:34.630 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.031) 0:01:34.661 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.029) 0:01:34.691 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.030) 0:01:34.721 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.029) 0:01:34.750 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.068) 0:01:34.819 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.047) 0:01:34.866 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.054) 0:01:34.920 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:39:59 -0500 (0:00:00.072) 0:01:34.992 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.032) 0:01:35.025 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.031) 0:01:35.056 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.027) 0:01:35.084 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.028) 0:01:35.112 *****
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.414) 0:01:35.526 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.034) 0:01:35.561 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.065) 0:01:35.626 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.053) 0:01:35.679 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.053) 0:01:35.733 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.066) 0:01:35.800 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.049) 0:01:35.849 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.049) 0:01:35.899 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.052) 0:01:35.951 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:00 -0500 (0:00:00.055) 0:01:36.007 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.050) 0:01:36.057 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.050) 0:01:36.108 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.102) 0:01:36.210 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.047) 0:01:36.258 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.047) 0:01:36.306 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.059) 0:01:36.366 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.093) 0:01:36.459 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.044) 0:01:36.504 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.044) 0:01:36.548 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.046) 0:01:36.595 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 08 February 2025 11:40:01 -0500 (0:00:00.044) 0:01:36.639 *****
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.437) 0:01:37.077 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.039) 0:01:37.117 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.067) 0:01:37.185 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.037) 0:01:37.223 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.038) 0:01:37.261 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.043) 0:01:37.304 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.029) 0:01:37.334 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.029) 0:01:37.363 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.028) 0:01:37.392 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.029) 0:01:37.421 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.028) 0:01:37.449 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.067) 0:01:37.517 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.031) 0:01:37.548 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.032) 0:01:37.580 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.029) 0:01:37.610 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.037) 0:01:37.647 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.076) 0:01:37.723 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.042) 0:01:37.766 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.035) 0:01:37.801 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.033) 0:01:37.835 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Saturday 08 February 2025 11:40:02 -0500 (0:00:00.034) 0:01:37.869 *****
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
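Each of the three handle_secret.yml passes above ends with a changed "Manage each secret" step, so three podman secrets were created; their names and contents are censored by no_log. A hedged sketch of the shape of the role input, using a vault variable loaded in the first play (the list variable podman_secrets and the item keys follow the role's documented interface; the secret name here is illustrative):

    podman_secrets:                                      # role input; real names censored in the log
      - name: mysql-root-password-container              # illustrative name
        state: present
        data: "{{ mysql_container_root_password }}"      # vault variable from the first play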
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.424) 0:01:38.293 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.033) 0:01:38.327 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.149) 0:01:38.476 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
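For readability, the __podman_quadlet_str value above unescapes to this quadlet-demo.kube unit file (content copied verbatim from the fact, newlines restored):

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml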
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.041) 0:01:38.517 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.050) 0:01:38.568 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.048) 0:01:38.617 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
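Because __podman_state is "absent" (part 1) and the spec is the file quadlet-demo.kube (parts 0 and 2), this pass tears the quadlet down rather than deploying it. A minimal sketch of a role input that would drive such a cleanup pass (the list variable podman_quadlet_specs follows the role's documented interface; treating this as the test's exact input is an assumption):

    podman_quadlet_specs:            # role input; state absent requests cleanup
      - file_src: quadlet-demo.kube  # matches __podman_quadlet_file_src above
        state: absent                # matches __podman_state above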
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.073) 0:01:38.691 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.146) 0:01:38.838 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.043) 0:01:38.881 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.040) 0:01:38.921 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:03 -0500 (0:00:00.050) 0:01:38.972 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
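The role stats /usr/bin/getsubids to decide how to look up sub-UID/sub-GID ranges; since __podman_user is root here, the getsubids lookups that follow are skipped. A sketch of the kind of check the role would run for a non-root user (the task and user name are illustrative, not from this run):

    - name: List sub-UID ranges for a rootless podman user   # illustrative task
      ansible.builtin.command: getsubids someuser            # hypothetical non-root user
      changed_when: false                                    # read-only query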
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.380) 0:01:39.353 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.038) 0:01:39.391 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.034) 0:01:39.426 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.035) 0:01:39.462 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.030) 0:01:39.493 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.031) 0:01:39.524 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.030) 0:01:39.555 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.032) 0:01:39.587 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.031) 0:01:39.618 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.053) 0:01:39.672 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.031) 0:01:39.704 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.029) 0:01:39.733 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.071) 0:01:39.805 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.041) 0:01:39.847 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.110) 0:01:39.957 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 08 February 2025 11:40:04 -0500 (0:00:00.032) 0:01:39.989 *****
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Sat 2025-02-08 11:39:45 EST",
"ActiveEnterTimestampMonotonic": "718655547",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "sysinit.target quadlet-demo-mysql.service -.mount network-online.target systemd-journald.socket quadlet-demo-network.service basic.target system.slice",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2025-02-08 11:39:44 EST",
"AssertTimestampMonotonic": "718158327",
"Before": "multi-user.target shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "252844000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2025-02-08 11:39:44 EST",
"ConditionTimestampMonotonic": "718158322",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo.service",
"ControlGroupId": "143796",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "22364",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "70720",
"ExecMainStartTimestamp": "Sat 2025-02-08 11:39:45 EST",
"ExecMainStartTimestampMonotonic": "718360001",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2025-02-08 11:39:44 EST] ; stop_time=[n/a] ; pid=70711 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2025-02-08 11:39:44 EST] ; stop_time=[n/a] ; pid=70711 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2025-02-08 11:39:44 EST",
"InactiveExitTimestampMonotonic": "718160193",
"InvocationID": "286d95d8400c4cd181691a69cd438c02",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "70720",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2528829440",
"MemoryCurrent": "3002368",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "25882624",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "quadlet-demo-mysql.service quadlet-demo-network.service system.slice sysinit.target -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Sat 2025-02-08 11:39:45 EST",
"StateChangeTimestampMonotonic": "718655547",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "4",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
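The property dump above is the systemd module's `status` return for the unit, equivalent to `systemctl show quadlet-demo.service`. FragmentPath under /run/systemd/generator shows this is a generated unit: the quadlet generator turned /etc/containers/systemd/quadlet-demo.kube (SourcePath) into the `podman kube play` ExecStart and the `podman kube down` ExecStopPost seen here. A .kube source consistent with that ExecStart would look roughly like this (an assumption for illustration; the real file is only read with no_log below):
    - name: Example only - a quadlet-demo.kube consistent with the ExecStart above
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.kube
        mode: "0644"
        content: |
          [Install]
          WantedBy=multi-user.target
          [Kube]
          Yaml=quadlet-demo.yml
          ConfigMap=envoy-proxy-configmap.yml
          Network=quadlet-demo.network
          PublishPort=8000:8080
          PublishPort=9000:9901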
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 08 February 2025 11:40:06 -0500 (0:00:01.414) 0:01:41.403 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032783.4648616,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"ctime": 1739032783.4678617,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 62915510,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1739032783.2018604,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 456,
"uid": 0,
"version": "2932427493",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
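Before cleaning up, the role stats the quadlet source so the parse/remove/reload steps below only run when the file actually exists; the mode 0644, size 456, and sha1 checksum identify the rendered quadlet-demo.kube. The gating pattern, sketched with illustrative names (not the role's source):
    - name: See if quadlet file exists
      ansible.builtin.stat:
        path: /etc/containers/systemd/quadlet-demo.kube
      register: quadlet_stat
    - name: Parse quadlet file
      ansible.builtin.include_tasks: parse_quadlet_file.yml
      when: quadlet_stat.stat.exists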
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 08 February 2025 11:40:06 -0500 (0:00:00.395) 0:01:41.799 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 08 February 2025 11:40:06 -0500 (0:00:00.057) 0:01:41.857 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 08 February 2025 11:40:07 -0500 (0:00:00.362) 0:01:42.219 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 08 February 2025 11:40:07 -0500 (0:00:00.056) 0:01:42.276 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 08 February 2025 11:40:07 -0500 (0:00:00.033) 0:01:42.309 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
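The slurp/parse pair runs with no_log because the file may embed secrets; for a .kube source the content is parsed as a systemd-style unit, the separate YAML branch is skipped, and the raw base64 payload is nulled out afterwards so the censored content does not linger in facts. The skeleton of that sequence (a sketch that omits the role's actual parsing logic):
    - name: Slurp quadlet file
      ansible.builtin.slurp:
        path: /etc/containers/systemd/quadlet-demo.kube
      register: __podman_quadlet_raw
      no_log: true
    - name: Parse quadlet file (decode only in this sketch)
      ansible.builtin.set_fact:
        __podman_quadlet_text: "{{ __podman_quadlet_raw.content | b64decode }}"
      no_log: true
    - name: Reset raw variable
      ansible.builtin.set_fact:
        __podman_quadlet_raw: null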
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 08 February 2025 11:40:07 -0500 (0:00:00.031) 0:01:42.341 *****
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:07 -0500 (0:00:00.371) 0:01:42.713 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
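Removing the source and reloading systemd is what actually retires the unit: generators rerun on daemon-reload, and with quadlet-demo.kube gone the quadlet generator no longer emits quadlet-demo.service into /run/systemd/generator (the reload itself reports no unit name and an empty status). Equivalent tasks:
    - name: Remove quadlet file
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo.kube
        state: absent
    - name: Refresh systemd
      ansible.builtin.systemd:
        daemon_reload: true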
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 08 February 2025 11:40:08 -0500 (0:00:00.743) 0:01:43.457 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 08 February 2025 11:40:08 -0500 (0:00:00.035) 0:01:43.492 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:08 -0500 (0:00:00.067) 0:01:43.560 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 08 February 2025 11:40:08 -0500 (0:00:00.035) 0:01:43.595 *****
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.744566",
"end": "2025-02-08 11:40:09.643553",
"rc": 0,
"start": "2025-02-08 11:40:08.898987"
}
STDOUT:
9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
f408d5f24b1df22e79ff47fd950d70bbf308134db66b52ba29e6cd81b656382a
fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d
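`podman image prune --all -f` deletes every image with no container referencing it and prints the removed IDs; the four hashes above are the images orphaned by this cleanup, while the registry and mysql images survive because their containers are still running, as the listings below confirm. As a task, with change detection keyed off the output (a sketch):
    - name: Prune images no longer in use
      ansible.builtin.command:
        argv: [podman, image, prune, --all, -f]
      register: __prune
      changed_when: __prune.stdout | length > 0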
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 08 February 2025 11:40:09 -0500 (0:00:01.130) 0:01:44.726 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:40:09 -0500 (0:00:00.054) 0:01:44.781 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:09 -0500 (0:00:00.028) 0:01:44.809 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:09 -0500 (0:00:00.028) 0:01:44.838 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 08 February 2025 11:40:09 -0500 (0:00:00.027) 0:01:44.865 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.031415",
"end": "2025-02-08 11:40:10.185651",
"rc": 0,
"start": "2025-02-08 11:40:10.154236"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 18 months ago 24.6 MB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 08 February 2025 11:40:10 -0500 (0:00:00.400) 0:01:45.266 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.028034",
"end": "2025-02-08 11:40:10.583970",
"rc": 0,
"start": "2025-02-08 11:40:10.555936"
}
STDOUT:
local 2ce71b63066ed4402ab34684197d9d1868b43cd94dbf701d92e842ee487e7893
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
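`podman kube down` removes the pod and its containers but leaves named volumes behind, which is why wp-pv-claim, envoy-proxy-config, and envoy-certificates still appear; the role's "Remove volumes" task was skipped for this unit. If this cleanup were meant to drop them too, it would be along these lines (illustrative; names taken from the listing above):
    - name: Remove volumes left behind by the kube unit
      ansible.builtin.command:
        argv: [podman, volume, rm, "{{ item }}"]
      loop:
        - wp-pv-claim
        - envoy-proxy-config
        - envoy-certificates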
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 08 February 2025 11:40:10 -0500 (0:00:00.397) 0:01:45.663 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.033327",
"end": "2025-02-08 11:40:10.981277",
"rc": 0,
"start": "2025-02-08 11:40:10.947950"
}
STDOUT:
93148221a3fd quay.io/libpod/registry:2.8.2 /etc/docker/regis... 5 minutes ago Up 5 minutes 127.0.0.1:5000->5000/tcp podman_registry
05159afa0f59 quay.io/linux-system-roles/mysql:5.6 mysqld 57 seconds ago Up 56 seconds (healthy) 3306/tcp quadlet-demo-mysql
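Only the test registry and quadlet-demo-mysql remain: the wordpress and envoy containers from the quadlet-demo pod were already torn down by the unit's ExecStopPost when the service was stopped. That stop hook is reproducible by hand, e.g. as:
    - name: What ExecStopPost ran when the unit stopped (for reference)
      ansible.builtin.command:
        argv: [podman, kube, down, /etc/containers/systemd/quadlet-demo.yml]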
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 08 February 2025 11:40:11 -0500 (0:00:00.445) 0:01:46.109 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.028037",
"end": "2025-02-08 11:40:11.433595",
"rc": 0,
"start": "2025-02-08 11:40:11.405558"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
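systemd-quadlet-demo is still listed because the network belongs to the separate quadlet-demo.network unit, which this cleanup pass has not reached yet (quadlet-demo-network.service still appears in the service facts below). Quadlet derives the network name systemd-<unit> automatically, so the source file can be minimal; an assumed reconstruction:
    - name: Example only - a minimal quadlet-demo.network
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.network
        mode: "0644"
        content: |
          [Network]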
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 08 February 2025 11:40:11 -0500 (0:00:00.404) 0:01:46.514 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 08 February 2025 11:40:11 -0500 (0:00:00.418) 0:01:46.933 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 08 February 2025 11:40:12 -0500 (0:00:00.423) 0:01:47.357 *****
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service": {
"name": "05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service",
"source": "systemd",
"state": "stopped",
"status": "transient"
},
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"apt-daily.service": {
"name": "apt-daily.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autofs.service": {
"name": "autofs.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"logrotate.service": {
"name": "logrotate.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
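This inventory of unit states comes from gathering service facts; it is how the test can verify which quadlet-generated units remain, and the three quadlet-demo* entries with status "generated" stand out above. Collecting and filtering it looks like:
    - name: For testing and debugging - services
      ansible.builtin.service_facts:
    - name: Show remaining quadlet-demo units (illustrative)
      ansible.builtin.debug:
        msg: "{{ ansible_facts.services | dict2items
          | selectattr('key', 'search', '^quadlet-demo')
          | map(attribute='key') | list }}"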
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:14 -0500 (0:00:02.281) 0:01:49.638 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:40:14 -0500 (0:00:00.033) 0:01:49.672 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
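Cleanup now moves on to the next quadlet in the spec: __podman_quadlet_str carries the fully rendered quadlet-demo.yml.j2 payload (a 20Gi PersistentVolumeClaim plus a two-container wordpress/envoy Pod), so __podman_quadlet_file_src and the structured spec are empty for this item. Rendering a template into a fact like that is just set_fact with the template lookup (a sketch, not the role's source):
    - name: Set per-container variables part 0 (sketch)
      ansible.builtin.set_fact:
        __podman_quadlet_str: "{{ lookup('template', 'quadlet-demo.yml.j2') }}"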
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:40:14 -0500 (0:00:00.131) 0:01:49.804 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:40:14 -0500 (0:00:00.054) 0:01:49.858 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:14 -0500 (0:00:00.041) 0:01:49.900 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:40:14 -0500 (0:00:00.059) 0:01:49.960 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.063) 0:01:50.023 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.037) 0:01:50.061 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.037) 0:01:50.098 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.042) 0:01:50.141 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
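The role only needs to know whether /usr/bin/getsubids exists: for rootless users it would use it to verify subuid/subgid ranges, but with __podman_user=root all of the range checks below are skipped. For a non-root user the two checks amount to (hedged sketch):
    - name: Check with getsubids for user subuids
      ansible.builtin.command:
        argv: [getsubids, "{{ __podman_user }}"]
      changed_when: false
    - name: Check with getsubids for user subgids
      ansible.builtin.command:
        argv: [getsubids, -g, "{{ __podman_user }}"]
      changed_when: false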
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.409) 0:01:50.551 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.034) 0:01:50.586 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.076) 0:01:50.663 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.036) 0:01:50.699 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.034) 0:01:50.734 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.037) 0:01:50.772 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.056) 0:01:50.829 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.057) 0:01:50.886 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
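The subuid/subgid checks above all skip because __podman_user is root; rootful podman needs no ID-range delegation. For a rootless user the role would consult getsubids, or fall back to reading /etc/subuid and /etc/subgid when the binary is missing, as the "Get subuid file" fallbacks above suggest. A minimal sketch of such a check, assuming illustrative task and variable names rather than the role's exact internals:

    # Sketch: validate subuid delegation for a rootless user (illustrative names).
    # The command module fails the play on a non-zero exit, which is the desired
    # effect when no range is configured for the user.
    - name: Check with getsubids for user subuids (sketch)
      ansible.builtin.command: getsubids {{ __podman_user }}
      register: __subuid_check   # illustrative variable name
      changed_when: false
      when: __podman_user not in ["root", "0"]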
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:40:15 -0500 (0:00:00.057) 0:01:50.944 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
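These facts select the rootful code path: units are managed in the systemd system instance, and XDG_RUNTIME_DIR follows the /run/user/<uid> convention. A minimal sketch of how such facts can be derived, assuming the getent_passwd layout and illustrative logic rather than the role's exact Jinja:

    - name: Derive systemd scope and runtime dir for the target user (sketch)
      ansible.builtin.set_fact:
        __podman_systemd_scope: "{{ 'system' if __podman_user in ['root', '0'] else 'user' }}"
        # getent_passwd values are [passwd, uid, gid, gecos, home, shell]; index 1 is the uid
        __podman_xdg_runtime_dir: "/run/user/{{ ansible_facts['getent_passwd'][__podman_user][1] }}"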
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.098) 0:01:51.042 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.061) 0:01:51.104 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.052) 0:01:51.156 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
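The quadlet file path is the quadlet directory joined with <name>.<type>; /etc/containers/systemd is the rootful location, while a rootless user's units would live under ~/.config/containers/systemd. A sketch of the composition, using the fact names already visible above:

    - name: Compose the quadlet file path (sketch)
      ansible.builtin.set_fact:
        __podman_quadlet_file: "{{ __podman_quadlet_path }}/{{ __podman_quadlet_name }}.{{ __podman_quadlet_type }}"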
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.098) 0:01:51.255 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.045) 0:01:51.301 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.077) 0:01:51.378 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.031) 0:01:51.410 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.033) 0:01:51.443 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032765.2277691,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"ctime": 1739032762.6997564,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9208596,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1739032762.440755,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.yml",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 1605,
"uid": 0,
"version": "436302464",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.486) 0:01:51.929 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 08 February 2025 11:40:16 -0500 (0:00:00.080) 0:01:52.009 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 08 February 2025 11:40:17 -0500 (0:00:00.369) 0:01:52.379 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 08 February 2025 11:40:17 -0500 (0:00:00.036) 0:01:52.415 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 08 February 2025 11:40:17 -0500 (0:00:00.043) 0:01:52.459 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 08 February 2025 11:40:17 -0500 (0:00:00.034) 0:01:52.493 *****
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.yml",
"state": "absent"
}
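Because __podman_state is "absent", cleanup deletes the unit source itself. The standalone equivalent is a plain file task:

    - name: Remove a quadlet unit file
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo.yml
        state: absent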
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:17 -0500 (0:00:00.398) 0:01:52.891 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
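Quadlet units are produced by a systemd generator, so after removing the source file the daemon must be reloaded for the generated unit to disappear; "name": null in the result is consistent with a reload-only call. A sketch:

    - name: Refresh systemd so the quadlet generator re-runs
      ansible.builtin.systemd:
        daemon_reload: true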
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 08 February 2025 11:40:18 -0500 (0:00:00.801) 0:01:53.693 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 08 February 2025 11:40:18 -0500 (0:00:00.062) 0:01:53.755 *****
changed: [managed-node1] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
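The item details are hidden by no_log, but the changed results show that volumes referenced by the parsed Kubernetes YAML were removed. A standalone equivalent using the containers.podman collection, with an illustrative volume name since the real names are censored above:

    - name: Remove a podman volume (sketch)
      containers.podman.podman_volume:
        name: example-volume   # illustrative; actual names are hidden by no_log
        state: absent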
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:20 -0500 (0:00:01.263) 0:01:55.018 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 08 February 2025 11:40:20 -0500 (0:00:00.037) 0:01:55.056 *****
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.028150",
"end": "2025-02-08 11:40:20.394593",
"rc": 0,
"start": "2025-02-08 11:40:20.366443"
}
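The role shells out to podman here; the logged argv corresponds directly to a command task like this sketch:

    - name: Prune images no longer in use (sketch)
      ansible.builtin.command:
        argv: [podman, image, prune, --all, -f]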
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 08 February 2025 11:40:20 -0500 (0:00:00.443) 0:01:55.499 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:40:20 -0500 (0:00:00.102) 0:01:55.601 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:20 -0500 (0:00:00.053) 0:01:55.654 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:20 -0500 (0:00:00.114) 0:01:55.769 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 08 February 2025 11:40:20 -0500 (0:00:00.052) 0:01:55.821 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.031808",
"end": "2025-02-08 11:40:21.179632",
"rc": 0,
"start": "2025-02-08 11:40:21.147824"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 18 months ago 24.6 MB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 08 February 2025 11:40:21 -0500 (0:00:00.473) 0:01:56.295 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.030182",
"end": "2025-02-08 11:40:21.649633",
"rc": 0,
"start": "2025-02-08 11:40:21.619451"
}
STDOUT:
local 2ce71b63066ed4402ab34684197d9d1868b43cd94dbf701d92e842ee487e7893
local systemd-quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 08 February 2025 11:40:21 -0500 (0:00:00.465) 0:01:56.761 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.033554",
"end": "2025-02-08 11:40:22.116387",
"rc": 0,
"start": "2025-02-08 11:40:22.082833"
}
STDOUT:
93148221a3fd quay.io/libpod/registry:2.8.2 /etc/docker/regis... 5 minutes ago Up 5 minutes 127.0.0.1:5000->5000/tcp podman_registry
05159afa0f59 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 08 February 2025 11:40:22 -0500 (0:00:00.440) 0:01:57.201 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.027781",
"end": "2025-02-08 11:40:22.538143",
"rc": 0,
"start": "2025-02-08 11:40:22.510362"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
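The listing tasks in this stretch are purely diagnostic; at this point the registry and mysql containers, the mysql volume, and the systemd-quadlet-demo network still exist because only the quadlet-demo.yml spec has been cleaned up so far. The same listings could be collected in one loop, sketched here with the commands taken from the logged argv values:

    - name: For testing and debugging - list podman resources (sketch)
      ansible.builtin.command:
        argv: "{{ item }}"
      loop:
        - [podman, images, -n]
        - [podman, volume, ls, -n]
        - [podman, ps, --noheading]
        - [podman, network, ls, -n, -q]
      changed_when: false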
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 08 February 2025 11:40:22 -0500 (0:00:00.422) 0:01:57.623 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 08 February 2025 11:40:23 -0500 (0:00:00.413) 0:01:58.036 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 08 February 2025 11:40:23 -0500 (0:00:00.439) 0:01:58.476 *****
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service": {
"name": "05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service",
"source": "systemd",
"state": "stopped",
"status": "failed"
},
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"apt-daily.service": {
"name": "apt-daily.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autofs.service": {
"name": "autofs.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"logrotate.service": {
"name": "logrotate.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
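The full service table is gathered so the role can inspect quadlet-generated units; note the "generated" status on the quadlet-demo-* services above, and that quadlet-demo-mysql.service is still running at this point. The stock service_facts module provides this table; a sketch of gathering it and inspecting one unit (the follow-up debug task is illustrative):

    - name: Gather the systemd service table
      ansible.builtin.service_facts:

    - name: Inspect one quadlet-generated unit (illustrative follow-up)
      ansible.builtin.debug:
        msg: "{{ ansible_facts.services['quadlet-demo-mysql.service'].state }}"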
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:25 -0500 (0:00:02.154) 0:02:00.631 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:40:25 -0500 (0:00:00.054) 0:02:00.685 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
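The next spec is envoy-proxy-configmap.yml, a Kubernetes ConfigMap carrying an Envoy bootstrap config: admin interface on 9901, a TLS listener on 8080 routing all paths to a local backend on port 80. Such a YAML is not a systemd unit by itself; a .kube quadlet references it. A hedged sketch of installing such a unit, where the unit name and file layout are assumptions, not taken from this run:

    - name: Install a .kube quadlet that consumes the ConfigMap (sketch; names assumed)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.kube
        mode: "0644"
        content: |
          [Install]
          WantedBy=default.target

          [Kube]
          # Yaml= and ConfigMap= are quadlet [Kube] keys; these file names are assumptions
          Yaml=quadlet-demo.yml
          ConfigMap=envoy-proxy-configmap.yml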
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:40:25 -0500 (0:00:00.074) 0:02:00.760 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:40:25 -0500 (0:00:00.065) 0:02:00.825 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:25 -0500 (0:00:00.054) 0:02:00.880 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
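Name and type are split off the source file name; type "yml" marks a Kubernetes YAML payload rather than a unit file such as .container or .network. A sketch of the derivation using the splitext filter (illustrative, not the role's exact expression):

    - name: Derive quadlet name and type from the file name (sketch)
      ansible.builtin.set_fact:
        __podman_quadlet_name: "{{ __podman_quadlet_file_src | basename | splitext | first }}"
        __podman_quadlet_type: "{{ __podman_quadlet_file_src | splitext | last | replace('.', '') }}"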
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:40:25 -0500 (0:00:00.084) 0:02:00.965 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.111) 0:02:01.077 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.134) 0:02:01.211 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.052) 0:02:01.264 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.050) 0:02:01.314 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.384) 0:02:01.698 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.036) 0:02:01.735 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.034) 0:02:01.769 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.034) 0:02:01.804 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.032) 0:02:01.837 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.034) 0:02:01.872 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.034) 0:02:01.906 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.032) 0:02:01.939 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:40:26 -0500 (0:00:00.033) 0:02:01.973 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.054) 0:02:02.027 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.034) 0:02:02.062 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.031) 0:02:02.093 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
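The quadlet file path in this result is the quadlet directory from part 4 joined with the unit name and type from part 2. A minimal sketch of the assembly (the set_fact shown is an illustration, not the role's verbatim source):

    - name: Set per-container variables part 5
      ansible.builtin.set_fact:
        # evaluates to /etc/containers/systemd/envoy-proxy-configmap.yml here
        __podman_quadlet_file: "{{ __podman_quadlet_path }}/{{ __podman_quadlet_name }}.{{ __podman_quadlet_type }}"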
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.075) 0:02:02.168 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.038) 0:02:02.207 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.120) 0:02:02.327 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.040) 0:02:02.368 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
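For this yml quadlet, part 3 set __podman_service_name to an empty string, so there is no unit to stop. When a service name is present (as in the quadlet-demo-mysql pass later in this log), the task would stop and disable it roughly like this; a sketch, with the exact module parameters being assumptions:

    - name: Stop and disable service
      ansible.builtin.systemd:
        name: "{{ __podman_service_name }}"
        scope: "{{ __podman_systemd_scope }}"
        state: stopped
        enabled: false
      when: __podman_service_name | length > 0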
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.034) 0:02:02.402 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032785.0298696,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"ctime": 1739032758.5357351,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 666894619,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1739032758.2747338,
"nlink": 1,
"path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 2102,
"uid": 0,
"version": "100079390",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
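A sketch of the existence check whose result is printed above (the register name is hypothetical; the path comes from part 5):

    - name: See if quadlet file exists
      ansible.builtin.stat:
        path: "{{ __podman_quadlet_file }}"
      register: __podman_quadlet_stat  # hypothetical register name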
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.383) 0:02:02.786 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 08 February 2025 11:40:27 -0500 (0:00:00.064) 0:02:02.850 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 08 February 2025 11:40:28 -0500 (0:00:00.382) 0:02:03.232 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 08 February 2025 11:40:28 -0500 (0:00:00.055) 0:02:03.287 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 08 February 2025 11:40:28 -0500 (0:00:00.051) 0:02:03.339 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
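The slurp/parse/reset sequence above reads the quadlet file and turns it into structured data while keeping its contents out of the log (no_log: true), then nulls the raw variable to drop the base64 payload. A minimal sketch under those assumptions; slurp returns base64-encoded content, and a Kubernetes yml quadlet may contain multiple documents, hence from_yaml_all:

    - name: Slurp quadlet file
      ansible.builtin.slurp:
        path: "{{ __podman_quadlet_file }}"
      register: __podman_quadlet_raw
      no_log: true

    - name: Parse quadlet yaml file
      ansible.builtin.set_fact:
        __podman_quadlet_parsed: "{{ __podman_quadlet_raw.content | b64decode | from_yaml_all | list }}"
      no_log: true

    - name: Reset raw variable
      ansible.builtin.set_fact:
        __podman_quadlet_raw: null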
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 08 February 2025 11:40:28 -0500 (0:00:00.050) 0:02:03.390 *****
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"state": "absent"
}
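The path/state pair in this result matches the return shape of ansible.builtin.file, so the removal step is effectively:

    - name: Remove quadlet file
      ansible.builtin.file:
        path: "{{ __podman_quadlet_file }}"
        state: absent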
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:28 -0500 (0:00:00.396) 0:02:03.786 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
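With the unit source removed, systemd must regenerate its view so the quadlet-generated unit disappears. The result shape here ("name": null, empty status) is what a bare daemon reload returns; a sketch, with the scope parameter being an assumption:

    - name: Refresh systemd
      ansible.builtin.systemd:
        daemon_reload: true
        scope: "{{ __podman_systemd_scope }}"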
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 08 February 2025 11:40:29 -0500 (0:00:00.777) 0:02:04.564 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 08 February 2025 11:40:29 -0500 (0:00:00.036) 0:02:04.600 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:29 -0500 (0:00:00.057) 0:02:04.657 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 08 February 2025 11:40:29 -0500 (0:00:00.054) 0:02:04.712 *****
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.029684",
"end": "2025-02-08 11:40:30.039303",
"rc": 0,
"start": "2025-02-08 11:40:30.009619"
}
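The prune runs exactly the argv shown in cmd above; as an Ansible task it is essentially a command invocation (the changed_when handling is an assumption):

    - name: Prune images no longer in use
      ansible.builtin.command: podman image prune --all -f
      changed_when: true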
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 08 February 2025 11:40:30 -0500 (0:00:00.485) 0:02:05.197 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:40:30 -0500 (0:00:00.071) 0:02:05.269 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:30 -0500 (0:00:00.037) 0:02:05.307 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:30 -0500 (0:00:00.034) 0:02:05.341 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
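All three linger tasks are skipped because this pass runs as root. For a rootless user, enabling linger keeps the user's systemd instance, and therefore any user-scope quadlet units, running after logout. A hedged sketch of what the enable step could look like; the creates guard is an assumption based on systemd's linger marker files:

    - name: Enable linger if needed
      ansible.builtin.command: loginctl enable-linger {{ __podman_user }}
      args:
        creates: /var/lib/systemd/linger/{{ __podman_user }}
      when: __podman_rootless | bool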
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 08 February 2025 11:40:30 -0500 (0:00:00.031) 0:02:05.373 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.033144",
"end": "2025-02-08 11:40:30.699834",
"rc": 0,
"start": "2025-02-08 11:40:30.666690"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 18 months ago 24.6 MB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 08 February 2025 11:40:30 -0500 (0:00:00.433) 0:02:05.806 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.028721",
"end": "2025-02-08 11:40:31.156015",
"rc": 0,
"start": "2025-02-08 11:40:31.127294"
}
STDOUT:
local 2ce71b63066ed4402ab34684197d9d1868b43cd94dbf701d92e842ee487e7893
local systemd-quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 08 February 2025 11:40:31 -0500 (0:00:00.454) 0:02:06.261 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.033740",
"end": "2025-02-08 11:40:31.625208",
"rc": 0,
"start": "2025-02-08 11:40:31.591468"
}
STDOUT:
93148221a3fd quay.io/libpod/registry:2.8.2 /etc/docker/regis... 5 minutes ago Up 5 minutes 127.0.0.1:5000->5000/tcp podman_registry
05159afa0f59 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 08 February 2025 11:40:31 -0500 (0:00:00.448) 0:02:06.709 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.029248",
"end": "2025-02-08 11:40:32.038585",
"rc": 0,
"start": "2025-02-08 11:40:32.009337"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 08 February 2025 11:40:32 -0500 (0:00:00.444) 0:02:07.153 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 08 February 2025 11:40:32 -0500 (0:00:00.425) 0:02:07.579 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 08 February 2025 11:40:32 -0500 (0:00:00.427) 0:02:08.006 *****
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service": {
"name": "05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service",
"source": "systemd",
"state": "stopped",
"status": "failed"
},
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"apt-daily.service": {
"name": "apt-daily.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autofs.service": {
"name": "autofs.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"logrotate.service": {
"name": "logrotate.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
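This service inventory comes from the service_facts module, which the role gathers purely for debugging. Note that quadlet-demo-mysql.service still shows state "running" with status "generated"; the mysql quadlet is cleaned up in the passes that follow. Gathering and reading an entry back out of the fact looks like this (the debug lookup is illustrative only):

    - name: For testing and debugging - services
      ansible.builtin.service_facts:

    - name: Example lookup (illustrative only)
      ansible.builtin.debug:
        msg: "{{ ansible_facts.services['quadlet-demo-mysql.service'].state }}"  # -> running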
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:35 -0500 (0:00:02.081) 0:02:10.087 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.032) 0:02:10.120 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
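For readability, the \n-escaped __podman_quadlet_str above (rendered from quadlet-demo-mysql.container.j2) unescapes to this quadlet container unit:

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill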
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.155) 0:02:10.276 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.061) 0:02:10.337 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.127) 0:02:10.465 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.092) 0:02:10.557 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.101) 0:02:10.659 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.046) 0:02:10.705 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.047) 0:02:10.753 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:35 -0500 (0:00:00.050) 0:02:10.804 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.399) 0:02:11.203 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.059) 0:02:11.263 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.060) 0:02:11.323 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.059) 0:02:11.382 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.057) 0:02:11.440 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.074) 0:02:11.515 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.059) 0:02:11.574 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.055) 0:02:11.630 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
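Note: the skipped tasks above, "Get subuid file" through "Fail if user not in subgid file", are the fallback path taken only when /usr/bin/getsubids is absent: the role then reads /etc/subuid and /etc/subgid directly and fails if the user has no entry. The format it would parse is the standard name:start:count triple (hypothetical user "user1"):
$ grep '^user1:' /etc/subuid /etc/subgid
/etc/subuid:user1:100000:65536
/etc/subgid:user1:100000:65536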
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.057) 0:02:11.688 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
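Note: __podman_service_name follows the quadlet generator's naming rules: a *.container file yields NAME.service, while *.volume and *.network files get a -volume or -network suffix. That mapping accounts for the unit names seen later in this run (the volume and network source file names are inferred from the generated unit names):
quadlet-demo-mysql.container -> quadlet-demo-mysql.service
quadlet-demo-mysql.volume    -> quadlet-demo-mysql-volume.service
quadlet-demo.network         -> quadlet-demo-network.service
__podman_systemd_scope is "system" and the XDG runtime dir is /run/user/0 because this quadlet is managed as root.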
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.160) 0:02:11.848 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.059) 0:02:11.907 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:40:36 -0500 (0:00:00.055) 0:02:11.962 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:40:37 -0500 (0:00:00.131) 0:02:12.094 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:40:37 -0500 (0:00:00.067) 0:02:12.161 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 08 February 2025 11:40:37 -0500 (0:00:00.133) 0:02:12.295 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 08 February 2025 11:40:37 -0500 (0:00:00.052) 0:02:12.347 *****
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-mysql.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Sat 2025-02-08 11:39:14 EST",
"ActiveEnterTimestampMonotonic": "687375924",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "tmp.mount system.slice -.mount network-online.target basic.target systemd-journald.socket quadlet-demo-mysql-volume.service quadlet-demo-network.service sysinit.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2025-02-08 11:39:13 EST",
"AssertTimestampMonotonic": "687115388",
"Before": "shutdown.target multi-user.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "2760797000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2025-02-08 11:39:13 EST",
"ConditionTimestampMonotonic": "687115384",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo-mysql.service",
"ControlGroupId": "142763",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuset io memory pids",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "22364",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "67248",
"ExecMainStartTimestamp": "Sat 2025-02-08 11:39:14 EST",
"ExecMainStartTimestampMonotonic": "687320001",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2025-02-08 11:39:13 EST",
"InactiveExitTimestampMonotonic": "687124966",
"InvocationID": "050073d457314dc7b3c4a7655afe611d",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "67248",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2691334144",
"MemoryCurrent": "599973888",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "650608640",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "continue",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice quadlet-demo-network.service quadlet-demo-mysql-volume.service -.mount",
"RequiresMountsFor": "/run/containers /tmp/quadlet_demo",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Sat 2025-02-08 11:39:14 EST",
"StateChangeTimestampMonotonic": "687375924",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "23",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
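Note: stopping the unit is what actually tears the container down; per the ExecStop/ExecStopPost entries above, systemd runs "podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid" when the service stops. Disabling has little lasting effect on its own, since UnitFileState is "generated": the unit is re-emitted from the .container source on every daemon-reload, so the durable cleanup is the file removal in the tasks below. Rough manual equivalent of this one task:
# systemctl stop quadlet-demo-mysql.service   # ExecStop/ExecStopPost remove the container via the cid file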
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 08 February 2025 11:40:40 -0500 (0:00:02.947) 0:02:15.294 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032752.6137052,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"ctime": 1739032752.6167052,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 511705320,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1739032752.3367038,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo-mysql.container",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 363,
"uid": 0,
"version": "3078291780",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
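Note: this is the 363-byte source file (SourcePath in the service status above) that the generator turned into quadlet-demo-mysql.service. Its exact contents never appear in the log because the slurp/parse tasks below run with no_log, but a plausible reconstruction from the generated ExecStart command line would be:
[Install]
WantedBy=multi-user.target

[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-demo-mysql
Volume=quadlet-demo-mysql.volume:/var/lib/mysql
Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
Network=quadlet-demo.network
Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
HealthCmd=/bin/true
HealthOnFailure=kill
(--replace, --rm, --cgroups=split and --sdnotify=conmon in ExecStart are quadlet defaults; the generator resolves the *.volume and *.network references to the systemd- prefixed podman names seen in the run command.)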
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 08 February 2025 11:40:40 -0500 (0:00:00.379) 0:02:15.674 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 08 February 2025 11:40:40 -0500 (0:00:00.060) 0:02:15.735 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 08 February 2025 11:40:41 -0500 (0:00:00.355) 0:02:16.090 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 08 February 2025 11:40:41 -0500 (0:00:00.052) 0:02:16.143 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 08 February 2025 11:40:41 -0500 (0:00:00.033) 0:02:16.176 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 08 February 2025 11:40:41 -0500 (0:00:00.033) 0:02:16.210 *****
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo-mysql.container",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:41 -0500 (0:00:00.424) 0:02:16.635 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
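Note: "Refresh systemd" is a plain daemon-reload (name is null, no unit targeted). With the .container file gone, the quadlet generator stops emitting quadlet-demo-mysql.service, which is why that unit is absent from the service facts gathered at the end of this block while quadlet-demo-mysql-volume.service and quadlet-demo-network.service, whose source files still exist, remain listed as "generated". Manual check:
# systemctl daemon-reload
# systemctl list-unit-files quadlet-demo-mysql.service   # 0 unit files listed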
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 08 February 2025 11:40:42 -0500 (0:00:00.755) 0:02:17.390 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 08 February 2025 11:40:42 -0500 (0:00:00.417) 0:02:17.807 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:42 -0500 (0:00:00.047) 0:02:17.855 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 08 February 2025 11:40:42 -0500 (0:00:00.032) 0:02:17.888 *****
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.255588",
"end": "2025-02-08 11:40:43.431281",
"rc": 0,
"start": "2025-02-08 11:40:43.175693"
}
STDOUT:
dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
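Note: "podman image prune --all -f" removes every image not referenced by at least one container; --all extends the prune beyond dangling images and -f skips the confirmation prompt. The single ID printed is the mysql:5.6 image freed above; the registry image survives because the podman_registry container (see the "containers" listing below) still uses it. One way to confirm removal:
# podman image exists quay.io/linux-system-roles/mysql:5.6 || echo removed
removed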
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 08 February 2025 11:40:43 -0500 (0:00:00.630) 0:02:18.518 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:40:43 -0500 (0:00:00.063) 0:02:18.581 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:43 -0500 (0:00:00.041) 0:02:18.622 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:43 -0500 (0:00:00.032) 0:02:18.655 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 08 February 2025 11:40:43 -0500 (0:00:00.031) 0:02:18.687 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.031237",
"end": "2025-02-08 11:40:44.007682",
"rc": 0,
"start": "2025-02-08 11:40:43.976445"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 18 months ago 24.6 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 08 February 2025 11:40:44 -0500 (0:00:00.404) 0:02:19.092 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.028536",
"end": "2025-02-08 11:40:44.410141",
"rc": 0,
"start": "2025-02-08 11:40:44.381605"
}
STDOUT:
local 2ce71b63066ed4402ab34684197d9d1868b43cd94dbf701d92e842ee487e7893
local systemd-quadlet-demo-mysql
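Note: the systemd-quadlet-demo-mysql volume is still present because the "Remove volumes" task above was skipped for this spec; the volume presumably belongs to the quadlet-demo-mysql.volume quadlet (quadlet-demo-mysql-volume.service) and would be cleaned up when that spec is processed in a later pass. Were it orphaned, manual removal would be:
# podman volume rm systemd-quadlet-demo-mysql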
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 08 February 2025 11:40:44 -0500 (0:00:00.403) 0:02:19.495 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.032433",
"end": "2025-02-08 11:40:44.818346",
"rc": 0,
"start": "2025-02-08 11:40:44.785913"
}
STDOUT:
93148221a3fd quay.io/libpod/registry:2.8.2 /etc/docker/regis... 6 minutes ago Up 6 minutes 127.0.0.1:5000->5000/tcp podman_registry
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 08 February 2025 11:40:44 -0500 (0:00:00.405) 0:02:19.900 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.028205",
"end": "2025-02-08 11:40:45.220207",
"rc": 0,
"start": "2025-02-08 11:40:45.192002"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
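Note: likewise, the systemd-quadlet-demo network created by quadlet-demo-network.service is expected to disappear only when its own quadlet spec is cleaned up later in the run. To inspect it in the meantime:
# podman network inspect systemd-quadlet-demo --format '{{.Name}}'
systemd-quadlet-demo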
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 08 February 2025 11:40:45 -0500 (0:00:00.405) 0:02:20.306 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 08 February 2025 11:40:45 -0500 (0:00:00.398) 0:02:20.704 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 08 February 2025 11:40:46 -0500 (0:00:00.394) 0:02:21.099 *****
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"apt-daily.service": {
"name": "apt-daily.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autofs.service": {
"name": "autofs.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"logrotate.service": {
"name": "logrotate.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
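Each entry in the service-facts dump above pairs a runtime "state" with a unit-file "status", both taken from systemd. A minimal way to cross-check any one entry from a shell on the managed node (a sketch; the unit name is taken from the dump above):

    # e.g. systemd-udevd.service, reported above as state "running", status "static"
    systemctl show -p ActiveState,SubState,UnitFileState systemd-udevd.service
    # ActiveState=active
    # SubState=running
    # UnitFileState=static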
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:48 -0500 (0:00:02.168) 0:02:23.268 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.034) 0:02:23.302 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
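__podman_quadlet_str above is the literal payload of the quadlet file this pass will clean up: a minimal volume quadlet consisting of nothing but the section header. On the managed node it looks like this (a sketch; the path comes from the facts set a few tasks below, and the stat later in this pass shows the file is 9 bytes, i.e. the header plus a newline):

    cat /etc/containers/systemd/quadlet-demo-mysql.volume
    # [Volume]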
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.045) 0:02:23.347 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.040) 0:02:23.388 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.035) 0:02:23.423 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
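Both facts above are derived from the quadlet file name: the basename gives the quadlet name, the extension gives the type. In shell terms (a sketch):

    src=quadlet-demo-mysql.volume
    echo "${src%.*}"   # quadlet-demo-mysql  (__podman_quadlet_name)
    echo "${src##*.}"  # volume              (__podman_quadlet_type)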
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.048) 0:02:23.472 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.061) 0:02:23.533 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.037) 0:02:23.571 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.036) 0:02:23.608 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:48 -0500 (0:00:00.044) 0:02:23.652 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
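The stat above only establishes that /usr/bin/getsubids exists; the follow-up checks are skipped because __podman_user is root, which needs no subordinate ID ranges. For a rootless user they would boil down to something like this (a sketch; "someuser" is a hypothetical name):

    getsubids someuser     # subordinate UID ranges, e.g. "0: someuser 100000 65536"
    getsubids -g someuser  # the same for subordinate GIDs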
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.373) 0:02:24.025 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.035) 0:02:24.061 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.033) 0:02:24.095 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.034) 0:02:24.129 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.034) 0:02:24.164 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.080) 0:02:24.244 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.035) 0:02:24.280 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.033) 0:02:24.313 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.035) 0:02:24.349 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
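The service name above follows the quadlet naming rule: a NAME.volume file generates NAME-volume.service. The generated unit can be inspected directly on the node (a sketch; the generator path matches the FragmentPath shown in the unit status later in this pass):

    systemctl cat quadlet-demo-mysql-volume.service
    # first line: # /run/systemd/generator/quadlet-demo-mysql-volume.service
    # followed by the unit body the quadlet generator produced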
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.056) 0:02:24.405 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
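__podman_rootless is false, so the role targets the system-wide quadlet directory. For a rootless user the equivalent search path would be under the user's home instead (a sketch):

    ls /etc/containers/systemd        # system scope, used here
    # rootless scope would be: ~/.config/containers/systemd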
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.034) 0:02:24.439 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.032) 0:02:24.471 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.075) 0:02:24.547 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.039) 0:02:24.586 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.078) 0:02:24.664 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 08 February 2025 11:40:49 -0500 (0:00:00.039) 0:02:24.704 *****
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-mysql-volume.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Sat 2025-02-08 11:39:02 EST",
"ActiveEnterTimestampMonotonic": "676030650",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "sysinit.target network-online.target systemd-journald.socket basic.target -.mount system.slice",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2025-02-08 11:39:02 EST",
"AssertTimestampMonotonic": "675982761",
"Before": "shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "36563000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2025-02-08 11:39:02 EST",
"ConditionTimestampMonotonic": "675982758",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "142230",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"ExecMainCode": "1",
"ExecMainExitTimestamp": "Sat 2025-02-08 11:39:02 EST",
"ExecMainExitTimestampMonotonic": "676030471",
"ExecMainHandoffTimestamp": "Sat 2025-02-08 11:39:02 EST",
"ExecMainHandoffTimestampMonotonic": "675993616",
"ExecMainPID": "65915",
"ExecMainStartTimestamp": "Sat 2025-02-08 11:39:02 EST",
"ExecMainStartTimestampMonotonic": "675983517",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2025-02-08 11:39:02 EST",
"InactiveExitTimestampMonotonic": "675983981",
"InvocationID": "b023fa8ac7f24463b4dd9199f672cbda",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3171868672",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "16334848",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "sysinit.target -.mount system.slice",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Sat 2025-02-08 11:39:02 EST",
"StateChangeTimestampMonotonic": "676030650",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "exited",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
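A rough shell equivalent of the task above (a sketch, not the module's exact logic): stop the unit, and note there is nothing to persistently disable, since UnitFileState is "generated" rather than "enabled":

    systemctl stop quadlet-demo-mysql-volume.service
    systemctl is-enabled quadlet-demo-mysql-volume.service  # prints: generated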
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 08 February 2025 11:40:50 -0500 (0:00:00.778) 0:02:25.483 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032741.2946477,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"ctime": 1739032741.2976477,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 201326806,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1739032741.0346463,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 9,
"uid": 0,
"version": "4176130442",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 08 February 2025 11:40:50 -0500 (0:00:00.381) 0:02:25.865 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 08 February 2025 11:40:50 -0500 (0:00:00.060) 0:02:25.925 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 08 February 2025 11:40:51 -0500 (0:00:00.421) 0:02:26.347 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 08 February 2025 11:40:51 -0500 (0:00:00.055) 0:02:26.403 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 08 February 2025 11:40:51 -0500 (0:00:00.033) 0:02:26.437 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 08 February 2025 11:40:51 -0500 (0:00:00.033) 0:02:26.470 *****
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"state": "absent"
}
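With the unit stopped, the file module's state: absent amounts to deleting the quadlet source by hand (a sketch):

    rm -f /etc/containers/systemd/quadlet-demo-mysql.volume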
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:51 -0500 (0:00:00.382) 0:02:26.853 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
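With no unit name given ("name": null), "Refresh systemd" is a plain daemon-reload; rerunning the generators drops quadlet-demo-mysql-volume.service now that its source file is gone (a sketch):

    systemctl daemon-reload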
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 08 February 2025 11:40:52 -0500 (0:00:00.723) 0:02:27.576 *****
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
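The result is censored by no_log, but for a .volume quadlet the managed resource is the podman volume that the unit's ExecStart created earlier in this pass, so the removal should be equivalent to (an assumption, matching that ExecStart line):

    podman volume rm systemd-quadlet-demo-mysql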
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 08 February 2025 11:40:52 -0500 (0:00:00.427) 0:02:28.004 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:53 -0500 (0:00:00.046) 0:02:28.051 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 08 February 2025 11:40:53 -0500 (0:00:00.033) 0:02:28.084 *****
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.027166",
"end": "2025-02-08 11:40:53.403791",
"rc": 0,
"start": "2025-02-08 11:40:53.376625"
}
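The prune removes every image not referenced by a container: --all extends the prune from dangling images to all unused ones, and -f skips the confirmation prompt. The same command, runnable by hand:

    podman image prune --all -f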
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 08 February 2025 11:40:53 -0500 (0:00:00.410) 0:02:28.494 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:40:53 -0500 (0:00:00.071) 0:02:28.565 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:40:53 -0500 (0:00:00.033) 0:02:28.599 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:40:53 -0500 (0:00:00.032) 0:02:28.631 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 08 February 2025 11:40:53 -0500 (0:00:00.031) 0:02:28.663 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.030182",
"end": "2025-02-08 11:40:53.996347",
"rc": 0,
"start": "2025-02-08 11:40:53.966165"
}
STDOUT:
quay.io/libpod/registry  2.8.2  0030ba3d620c  18 months ago  24.6 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 08 February 2025 11:40:54 -0500 (0:00:00.417) 0:02:29.080 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.027891",
"end": "2025-02-08 11:40:54.404946",
"rc": 0,
"start": "2025-02-08 11:40:54.377055"
}
STDOUT:
local 2ce71b63066ed4402ab34684197d9d1868b43cd94dbf701d92e842ee487e7893
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 08 February 2025 11:40:54 -0500 (0:00:00.457) 0:02:29.537 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.033290",
"end": "2025-02-08 11:40:54.870679",
"rc": 0,
"start": "2025-02-08 11:40:54.837389"
}
STDOUT:
93148221a3fd  quay.io/libpod/registry:2.8.2  /etc/docker/regis...  6 minutes ago  Up 6 minutes  127.0.0.1:5000->5000/tcp  podman_registry
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 08 February 2025 11:40:54 -0500 (0:00:00.415) 0:02:29.953 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.027320",
"end": "2025-02-08 11:40:55.276626",
"rc": 0,
"start": "2025-02-08 11:40:55.249306"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
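The systemd-quadlet-demo network listed above was created by quadlet-demo-network.service (still "generated" in the service dump below) and is untouched by this cleanup pass; it can be examined directly (a sketch):

    podman network inspect systemd-quadlet-demo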
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 08 February 2025 11:40:55 -0500 (0:00:00.411) 0:02:30.365 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 08 February 2025 11:40:55 -0500 (0:00:00.424) 0:02:30.790 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 08 February 2025 11:40:56 -0500 (0:00:00.416) 0:02:31.207 *****
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"apt-daily.service": {
"name": "apt-daily.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autofs.service": {
"name": "autofs.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"logrotate.service": {
"name": "logrotate.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:40:58 -0500 (0:00:02.058) 0:02:33.265 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.053) 0:02:33.319 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
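For readability, here is the __podman_quadlet_str value above with its escaped newlines expanded; this is the quadlet file the role wrote to /etc/containers/systemd/quadlet-demo.network earlier in the run and, as the next task shows (__podman_state is "absent"), is now about to clean up:

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress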
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.050) 0:02:33.370 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.053) 0:02:33.423 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.036) 0:02:33.460 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.048) 0:02:33.509 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.062) 0:02:33.571 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.037) 0:02:33.609 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.040) 0:02:33.650 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 08 February 2025 11:40:58 -0500 (0:00:00.072) 0:02:33.722 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032452.5561087,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97",
"ctime": 1739032446.715072,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 9150722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1730678400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15744,
"uid": 0,
"version": "796201794",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
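Since __podman_user here is root, the getsubids-based subuid/subgid checks that follow are all skipped. For a rootless user the role would query the subordinate ID ranges with this binary; a hedged illustration (the user name and ranges below are hypothetical, not taken from this run):

    # print subuid ranges; output format is "index: user start count"
    getsubids someuser        # e.g. 0: someuser 100000 65536
    # -g performs the same lookup against the subordinate GID ranges
    getsubids -g someuser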
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.496) 0:02:34.219 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.061) 0:02:34.281 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.057) 0:02:34.338 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.058) 0:02:34.397 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.059) 0:02:34.457 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.057) 0:02:34.514 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.058) 0:02:34.573 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.059) 0:02:34.633 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.041) 0:02:34.674 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
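Worth noting is the naming convention these facts reveal: the source file quadlet-demo.network yields the unit quadlet-demo-network.service, and (as the ExecStart later in this log confirms) the Podman network itself is named systemd-quadlet-demo. A minimal shell sketch of the "<name>-<type>.service" pattern seen for this network quadlet (other quadlet types, e.g. .container files, name their units differently):

    # derive the unit name from a quadlet file name, per the pattern in this log
    f=quadlet-demo.network
    echo "${f%.*}-${f##*.}.service"   # -> quadlet-demo-network.service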
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.072) 0:02:34.747 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.041) 0:02:34.788 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.030) 0:02:34.819 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.084) 0:02:34.903 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 08 February 2025 11:40:59 -0500 (0:00:00.039) 0:02:34.943 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 08 February 2025 11:41:00 -0500 (0:00:00.076) 0:02:35.020 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 08 February 2025 11:41:00 -0500 (0:00:00.077) 0:02:35.097 *****
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-network.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Sat 2025-02-08 11:38:57 EST",
"ActiveEnterTimestampMonotonic": "670644890",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "system.slice sysinit.target systemd-journald.socket network-online.target -.mount basic.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2025-02-08 11:38:57 EST",
"AssertTimestampMonotonic": "670599460",
"Before": "shutdown.target",
"BindLogSockets": "no",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "39018000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanLiveMount": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2025-02-08 11:38:57 EST",
"ConditionTimestampMonotonic": "670599457",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "142191",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DebugInvocation": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3698229248",
"EffectiveMemoryMax": "3698229248",
"EffectiveTasksMax": "22364",
"ExecMainCode": "1",
"ExecMainExitTimestamp": "Sat 2025-02-08 11:38:57 EST",
"ExecMainExitTimestampMonotonic": "670644718",
"ExecMainHandoffTimestamp": "Sat 2025-02-08 11:38:57 EST",
"ExecMainHandoffTimestampMonotonic": "670612448",
"ExecMainPID": "65088",
"ExecMainStartTimestamp": "Sat 2025-02-08 11:38:57 EST",
"ExecMainStartTimestampMonotonic": "670600155",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2025-02-08 11:38:57 EST",
"InactiveExitTimestampMonotonic": "670600610",
"InvocationID": "fe6cc76a5ce6465fba49b0264f4cf4da",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "13977",
"LimitNPROCSoft": "13977",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "13977",
"LimitSIGPENDINGSoft": "13977",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LiveMountResult": "success",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureDurationUSec": "[not set]",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3162804224",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "16445440",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivatePIDs": "no",
"PrivateTmp": "no",
"PrivateTmpEx": "no",
"PrivateUsers": "no",
"PrivateUsersEx": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectControlGroupsEx": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.network",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Sat 2025-02-08 11:38:57 EST",
"StateChangeTimestampMonotonic": "670644890",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "exited",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22364",
"TimeoutAbortUSec": "1min 30s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "terminate",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
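These properties describe the unit that the quadlet systemd generator emitted at /run/systemd/generator/quadlet-demo-network.service (see FragmentPath) from the source file named in SourcePath. Reassembled from the ExecStart, Type, RemainAfterExit, SyslogIdentifier, dependency, and RequiresMountsFor values above, the generated unit looks approximately like this (an approximation for orientation, not verbatim generator output):

    # approximate reconstruction from the properties above
    [Unit]
    SourcePath=/etc/containers/systemd/quadlet-demo.network
    RequiresMountsFor=/run/containers
    Wants=network-online.target
    After=network-online.target

    [Service]
    Type=oneshot
    RemainAfterExit=yes
    SyslogIdentifier=quadlet-demo-network
    ExecStart=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo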
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 08 February 2025 11:41:00 -0500 (0:00:00.796) 0:02:35.893 *****
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1739032735.9616196,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"ctime": 1739032735.9636197,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 155189475,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1739032735.5436175,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.network",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 74,
"uid": 0,
"version": "1638805476",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 08 February 2025 11:41:01 -0500 (0:00:00.401) 0:02:36.294 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 08 February 2025 11:41:01 -0500 (0:00:00.082) 0:02:36.377 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 08 February 2025 11:41:01 -0500 (0:00:00.393) 0:02:36.771 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 08 February 2025 11:41:01 -0500 (0:00:00.071) 0:02:36.842 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 08 February 2025 11:41:01 -0500 (0:00:00.047) 0:02:36.890 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 08 February 2025 11:41:01 -0500 (0:00:00.042) 0:02:36.933 *****
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.network",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 08 February 2025 11:41:02 -0500 (0:00:00.377) 0:02:37.310 *****
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
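The null name and empty status are what ansible.builtin.systemd_service returns when invoked for a daemon reload only; with the quadlet source file gone, this re-runs the generators so quadlet-demo-network.service disappears. Done by hand, the equivalent would be roughly:

    # re-run all systemd generators; the unit generated from the deleted
    # quadlet file is dropped
    systemctl daemon-reload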
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 08 February 2025 11:41:03 -0500 (0:00:00.754) 0:02:38.065 *****
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
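The resource itself is censored by no_log, but for a network quadlet being removed the managed resource is the Podman network created by the ExecStart shown earlier. A hedged sketch of the equivalent manual step:

    # remove the network the quadlet created (name taken from the ExecStart above);
    # adding -f would also remove any containers that still use the network
    podman network rm systemd-quadlet-demo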
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 08 February 2025 11:41:03 -0500 (0:00:00.458) 0:02:38.524 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 08 February 2025 11:41:03 -0500 (0:00:00.078) 0:02:38.603 *****
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 08 February 2025 11:41:03 -0500 (0:00:00.054) 0:02:38.657 *****
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.028501",
"end": "2025-02-08 11:41:03.997800",
"rc": 0,
"start": "2025-02-08 11:41:03.969299"
}
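Note that this prunes only images unreferenced by any container, which is why the registry image backing the still-running podman_registry container survives into the next task's output. Run by hand, the same cleanup is:

    # remove all unused images without prompting; images referenced by
    # running or stopped containers are kept
    podman image prune --all -f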
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 08 February 2025 11:41:04 -0500 (0:00:00.428) 0:02:39.086 *****
included: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 08 February 2025 11:41:04 -0500 (0:00:00.096) 0:02:39.182 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 08 February 2025 11:41:04 -0500 (0:00:00.095) 0:02:39.278 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 08 February 2025 11:41:04 -0500 (0:00:00.038) 0:02:39.316 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 08 February 2025 11:41:04 -0500 (0:00:00.032) 0:02:39.349 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.029592",
"end": "2025-02-08 11:41:04.674393",
"rc": 0,
"start": "2025-02-08 11:41:04.644801"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 18 months ago 24.6 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 08 February 2025 11:41:04 -0500 (0:00:00.408) 0:02:39.757 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.030280",
"end": "2025-02-08 11:41:05.078256",
"rc": 0,
"start": "2025-02-08 11:41:05.047976"
}
STDOUT:
local 2ce71b63066ed4402ab34684197d9d1868b43cd94dbf701d92e842ee487e7893
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 08 February 2025 11:41:05 -0500 (0:00:00.405) 0:02:40.163 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.032330",
"end": "2025-02-08 11:41:05.487922",
"rc": 0,
"start": "2025-02-08 11:41:05.455592"
}
STDOUT:
93148221a3fd quay.io/libpod/registry:2.8.2 /etc/docker/regis... 6 minutes ago Up 6 minutes 127.0.0.1:5000->5000/tcp podman_registry
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 08 February 2025 11:41:05 -0500 (0:00:00.408) 0:02:40.571 *****
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.028730",
"end": "2025-02-08 11:41:05.891114",
"rc": 0,
"start": "2025-02-08 11:41:05.862384"
}
STDOUT:
podman
podman-default-kube-network
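systemd-quadlet-demo no longer appears in this listing, confirming the network removal took effect. When scripting such a check, podman network exists reports the result via its exit status (a sketch; the echoed strings are illustrative):

    # exit status 0 if the network exists, nonzero otherwise
    podman network exists systemd-quadlet-demo && echo present || echo absent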
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 08 February 2025 11:41:05 -0500 (0:00:00.405) 0:02:40.976 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 08 February 2025 11:41:06 -0500 (0:00:00.406) 0:02:41.383 *****
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 08 February 2025 11:41:06 -0500 (0:00:00.397) 0:02:41.781 *****
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"apt-daily.service": {
"name": "apt-daily.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autofs.service": {
"name": "autofs.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"logrotate.service": {
"name": "logrotate.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
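The unit map above has the shape produced by ansible.builtin.service_facts: one entry per systemd unit with name, source, state, and status. A minimal sketch of gathering and querying such a map follows; it is illustrative only and not necessarily the task that produced this output:

  # Gather systemd unit facts, then inspect a single entry
  - name: Gather service facts
    ansible.builtin.service_facts:

  - name: Show the state of podman.service
    ansible.builtin.debug:
      var: ansible_facts.services['podman.service']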
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 08 February 2025 11:41:08 -0500 (0:00:02.060) 0:02:43.842 *****
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Saturday 08 February 2025 11:41:08 -0500 (0:00:00.032) 0:02:43.874 *****
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Saturday 08 February 2025 11:41:08 -0500 (0:00:00.030) 0:02:43.905 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Saturday 08 February 2025 11:41:08 -0500 (0:00:00.029) 0:02:43.934 *****
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [Ensure no resources] *****************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188
Saturday 08 February 2025 11:41:08 -0500 (0:00:00.053) 0:02:43.987 *****
fatal: [managed-node1]: FAILED! => {
"assertion": "__podman_test_debug_images.stdout == \"\"",
"changed": false,
"evaluated_to": false
}
MSG:
Assertion failed
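The failed condition checks that a previously registered image listing is empty, i.e. that cleanup removed every container image. A minimal sketch of such a check; the listing command is an assumption, and only the variable name and the asserted condition are taken from the result above:

  - name: List remaining images (assumed command)
    ansible.builtin.command: podman images -n -q
    register: __podman_test_debug_images
    changed_when: false

  - name: Ensure no resources
    ansible.builtin.assert:
      that:
        - __podman_test_debug_images.stdout == ""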
TASK [Debug] *******************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199
Saturday 08 February 2025 11:41:09 -0500 (0:00:00.083) 0:02:44.071 *****
ok: [managed-node1] => {
"changed": false,
"cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n",
"delta": "0:00:00.373726",
"end": "2025-02-08 11:41:09.732215",
"rc": 0,
"start": "2025-02-08 11:41:09.358489"
}
STDERR:
+ set -o pipefail
+ systemctl list-units --plain -l --all
+ grep quadlet
+ :
+ systemctl list-unit-files --all
+ grep quadlet
+ :
+ systemctl list-units --plain --failed -l --all
+ grep quadlet
+ :
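The "+" lines above are the set -x expansion of the task's cmd shown in the result; written out as a playbook task, that command reads:

  - name: Debug
    ansible.builtin.shell: |
      exec 1>&2
      set -x
      set -o pipefail
      systemctl list-units --plain -l --all | grep quadlet || :
      systemctl list-unit-files --all | grep quadlet || :
      systemctl list-units --plain --failed -l --all | grep quadlet || :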
TASK [Get journald] ************************************************************
task path: /tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209
Saturday 08 February 2025 11:41:09 -0500 (0:00:00.743) 0:02:44.814 *****
fatal: [managed-node1]: FAILED! => {
"changed": false,
"cmd": [
"journalctl",
"-ex"
],
"delta": "0:00:00.039293",
"end": "2025-02-08 11:41:10.130931",
"failed_when_result": true,
"rc": 0,
"start": "2025-02-08 11:41:10.091638"
}
STDOUT:
Feb 08 11:37:20 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 898.
Feb 08 11:37:20 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 903.
Feb 08 11:37:20 managed-node1 conmon[45320]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 908.
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 913.
Feb 08 11:37:21 managed-node1 conmon[45348]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 918.
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 923.
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 928.
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 933.
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 938.
Feb 08 11:37:21 managed-node1 conmon[45420]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:21 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 943.
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 948.
Feb 08 11:37:22 managed-node1 python3.12[45554]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 953.
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 958.
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 963.
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 968.
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 973.
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 978.
Feb 08 11:37:22 managed-node1 conmon[45668]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 983.
Feb 08 11:37:22 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 988.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 993.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 998.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1003.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1008.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1013.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1018.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1023.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1028.
Feb 08 11:37:23 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1033.
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1038.
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1043.
Feb 08 11:37:24 managed-node1 conmon[45967]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:24 managed-node1 python3.12[45962]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1048.
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1053.
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1058.
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1063.
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1068.
Feb 08 11:37:24 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1073.
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1078.
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1083.
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1088.
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1093.
Feb 08 11:37:25 managed-node1 conmon[46137]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1098.
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1103.
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1108.
Feb 08 11:37:25 managed-node1 python3.12[46280]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:25 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1113.
Feb 08 11:37:26 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1118.
Feb 08 11:37:26 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1123.
Feb 08 11:37:26 managed-node1 conmon[46377]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:26 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1128.
Feb 08 11:37:26 managed-node1 python3.12[46483]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Feb 08 11:37:26 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1133.
Feb 08 11:37:26 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1138.
Feb 08 11:37:26 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1143.
Feb 08 11:37:27 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1148.
Feb 08 11:37:27 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1153.
Feb 08 11:37:27 managed-node1 python3.12[46676]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:27 managed-node1 conmon[46689]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:27 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1158.
Feb 08 11:37:27 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1163.
Feb 08 11:37:27 managed-node1 conmon[46774]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:27 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1168.
Feb 08 11:37:27 managed-node1 python3.12[46865]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:37:27 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1173.
Feb 08 11:37:27 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1178.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1183.
Feb 08 11:37:28 managed-node1 conmon[46927]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1188.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1193.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1198.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1203.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1208.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1213.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1218.
Feb 08 11:37:28 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1223.
Feb 08 11:37:29 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1228.
Feb 08 11:37:29 managed-node1 python3.12[47169]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:29 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1233.
Feb 08 11:37:29 managed-node1 conmon[47182]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:29 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:30 managed-node1 conmon[47462]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:30 managed-node1 python3.12[47445]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
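The ansible-systemd invocation above stops and disables the quadlet-generated service as part of cleanup. The logged parameters map roughly to the following task sketch (task name is illustrative):

    - name: Stop and disable the auth_test_1_quadlet service
      ansible.builtin.systemd_service:
        name: auth_test_1_quadlet.service
        scope: system
        state: stopped
        enabled: false
        force: true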
Feb 08 11:37:30 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:31 managed-node1 python3.12[47640]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:31 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:31 managed-node1 python3.12[47822]: ansible-file Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
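The ansible-file call above removes the quadlet unit file itself. Because *.container files are turned into services by the podman systemd generator, a daemon reload is what actually makes the generated unit disappear; a minimal sketch of the pair (the reload task is an assumption about ordering, not copied from the role):

    - name: Remove the quadlet unit file
      ansible.builtin.file:
        path: /etc/containers/systemd/auth_test_1_quadlet.container
        state: absent

    - name: Reload systemd so the generated service goes away
      ansible.builtin.systemd_service:
        daemon_reload: true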
Feb 08 11:37:31 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:31 managed-node1 conmon[47840]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:31 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:33 managed-node1 conmon[48171]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:33 managed-node1 python3.12[48166]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:33 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:34 managed-node1 conmon[48420]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:34 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:34 managed-node1 conmon[48567]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:34 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:35 managed-node1 conmon[48747]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:35 managed-node1 python3.12[48774]: ansible-file Invoked with path=/root/.config/containers state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:37:35 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:36 managed-node1 conmon[49016]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:36 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:36 managed-node1 python3.12[49014]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:36 managed-node1 conmon[49031]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:36 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:37 managed-node1 conmon[49454]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:37 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:38 managed-node1 conmon[49642]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:38 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:38 managed-node1 conmon[49757]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:38 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:38 managed-node1 conmon[49817]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:38 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:39 managed-node1 conmon[49906]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:39 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:39 managed-node1 conmon[50006]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:39 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:41 managed-node1 conmon[50407]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:41 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:41 managed-node1 python3.12[50484]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
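The command above records the podman version, which the role typically uses to decide which features (for example quadlet support) are available on the managed host. A minimal sketch of capturing and parsing it (the split-based parsing is an illustrative assumption):

    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_out
      changed_when: false

    - name: Extract the bare version number from "podman version X.Y.Z"
      ansible.builtin.set_fact:
        __podman_version: "{{ __podman_version_out.stdout.split()[-1] }}"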
Feb 08 11:37:41 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:42 managed-node1 conmon[50696]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:42 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:42 managed-node1 conmon[50755]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:42 managed-node1 python3.12[50750]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None
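The getent lookup above resolves the test user's passwd entry (UID, home directory, shell) without failing if the key is missing. As a task it corresponds roughly to:

    - name: Look up auth_test_user1 in the passwd database
      ansible.builtin.getent:
        database: passwd
        key: auth_test_user1
        fail_key: false

On success the result is exposed via the getent_passwd fact.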
Feb 08 11:37:42 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:42 managed-node1 conmon[50784]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:42 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:43 managed-node1 python3.12[50954]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:43 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:43 managed-node1 conmon[51073]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:43 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:43 managed-node1 python3.12[51131]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:43 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:44 managed-node1 conmon[51264]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:44 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:44 managed-node1 conmon[51323]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:44 managed-node1 python3.12[51319]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
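Taken together, the two getsubids invocations (this one and the one at 11:37:43) verify that auth_test_user1 has subordinate UID and GID ranges, which rootless podman needs for user-namespace mapping. A minimal sketch of both checks as tasks (names are illustrative):

    - name: Check subordinate UID ranges for auth_test_user1
      ansible.builtin.command: getsubids auth_test_user1
      changed_when: false

    - name: Check subordinate GID ranges for auth_test_user1
      ansible.builtin.command: getsubids -g auth_test_user1
      changed_when: false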
Feb 08 11:37:44 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1803.
Feb 08 11:37:44 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1808.
Feb 08 11:37:44 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1813.
Feb 08 11:37:44 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1818.
Feb 08 11:37:44 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1823.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1828.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1833.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1838.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1843.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1848.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:45 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:46 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:47 managed-node1 python3.12[51774]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:47 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:47 managed-node1 conmon[51785]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:47 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:47 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:47 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:47 managed-node1 conmon[51958]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:47 managed-node1 python3.12[51964]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:47 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:47 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:47 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 python3.12[52140]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:48 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:49 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:50 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:50 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:50 managed-node1 conmon[52529]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:50 managed-node1 python3.12[52567]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:50 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:50 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:50 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:50 managed-node1 conmon[52688]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:50 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 python3.12[52746]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 conmon[52798]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 conmon[52878]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 python3.12[52931]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:51 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 conmon[53102]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:52 managed-node1 conmon[53130]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:52 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:53 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:53 managed-node1 python3.12[53270]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:53 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:53 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:53 managed-node1 conmon[53321]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:53 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:53 managed-node1 conmon[53395]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:53 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:53 managed-node1 python3.12[53455]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:53 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:53 managed-node1 conmon[53475]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:54 managed-node1 conmon[53513]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:54 managed-node1 python3.12[53645]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:54 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 conmon[53804]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 python3.12[53939]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/auth_test_user1/.config/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:55 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 python3.12[54141]: ansible-stat Invoked with path=/run/user/2001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:56 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:56 managed-node1 sudo[54381]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ihofhreqrxeptvmwzqrayhartccduuer ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032676.679524-18143-86205116781318/AnsiballZ_systemd.py'
Feb 08 11:37:56 managed-node1 sudo[54381]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:37:57 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:57 managed-node1 conmon[54396]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:57 managed-node1 python3.12[54384]: ansible-systemd Invoked with name=podman-kube@-home-auth_test_user1-.config-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Feb 08 11:37:57 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:57 managed-node1 systemd[38864]: Reload requested from client PID 54415 ('systemctl')...
Feb 08 11:37:57 managed-node1 systemd[38864]: Reloading...
Feb 08 11:37:57 managed-node1 systemd[38864]: Reloading finished in 76 ms.
Feb 08 11:37:57 managed-node1 systemd[38864]: Stopping podman-kube@-home-auth_test_user1-.config-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play...
Feb 08 11:37:57 managed-node1 systemd[38864]: Started libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope - libcrun container.
Feb 08 11:37:57 managed-node1 conmon[54439]: conmon bed3753ef9644759006b : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-bed3753ef9644759006b2fed937a0ab0c55e29e7b2d67af0d3adae5488592ba7.scope/container/memory.events
Feb 08 11:37:57 managed-node1 conmon[40491]: conmon 324d5393f78257167d57 : Failed to open cgroups file: /sys/fs/cgroup/user.slice/user-2001.slice/user@2001.service/user.slice/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice/libpod-324d5393f78257167d5764cb7afc89987fad3edefd478ca295635ba52fd770ae.scope/container/memory.events
Feb 08 11:37:57 managed-node1 kernel: podman1: port 1(veth0) entered disabled state
Feb 08 11:37:57 managed-node1 kernel: veth0 (unregistering): left allmulticast mode
Feb 08 11:37:57 managed-node1 kernel: veth0 (unregistering): left promiscuous mode
Feb 08 11:37:57 managed-node1 kernel: podman1: port 1(veth0) entered disabled state
Feb 08 11:37:57 managed-node1 systemd[38864]: Removed slice user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice - cgroup user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice.
Feb 08 11:37:57 managed-node1 systemd[38864]: user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice: Consumed 5.566s CPU time, 1M memory peak.
Feb 08 11:37:57 managed-node1 systemd[38864]: user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice: Failed to open /run/user/2001/systemd/transient/user-libpod_pod_b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768.slice: No such file or directory
Feb 08 11:37:57 managed-node1 podman[54437]: Pods stopped:
Feb 08 11:37:57 managed-node1 podman[54437]: b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768
Feb 08 11:37:57 managed-node1 podman[54437]: Pods removed:
Feb 08 11:37:57 managed-node1 podman[54437]: b544dbbdaedb8bef01c4be207a1b2853379e36eb264dc450b97a0bf80f859768
Feb 08 11:37:57 managed-node1 podman[54437]: Secrets removed:
Feb 08 11:37:57 managed-node1 podman[54437]: Volumes removed:
Feb 08 11:37:57 managed-node1 systemd[38864]: Stopped podman-kube@-home-auth_test_user1-.config-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play.
Feb 08 11:37:57 managed-node1 systemd[38864]: podman-kube@-home-auth_test_user1-.config-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 31.717s CPU time, 66M memory peak.
Feb 08 11:37:57 managed-node1 sudo[54381]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:37:58 managed-node1 python3.12[54620]: ansible-stat Invoked with path=/home/auth_test_user1/.config/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:37:58 managed-node1 sudo[54795]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hrvqsmibnimnvmranqsotvloggvsezjm ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032678.3640776-18213-113790122363062/AnsiballZ_podman_play.py'
Feb 08 11:37:58 managed-node1 sudo[54795]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:37:58 managed-node1 python3.12[54798]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/home/auth_test_user1/.config/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Feb 08 11:37:58 managed-node1 python3.12[54798]: ansible-containers.podman.podman_play version: 5.3.1, kube file /home/auth_test_user1/.config/containers/ansible-kubernetes.d/auth_test_1_kube.yml
Feb 08 11:37:58 managed-node1 systemd[38864]: Started podman-54805.scope.
Feb 08 11:37:58 managed-node1 sudo[54795]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:37:59 managed-node1 python3.12[54943]: ansible-file Invoked with path=/home/auth_test_user1/.config/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:38:00 managed-node1 python3.12[55074]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:01 managed-node1 python3.12[55207]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:01 managed-node1 python3.12[55339]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:02 managed-node1 python3.12[55471]: ansible-stat Invoked with path=/run/user/2001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:03 managed-node1 sudo[55646]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-byuuatgiizqcussmqigajaedqxsdcycd ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032682.9539752-18432-141256958750757/AnsiballZ_systemd.py'
Feb 08 11:38:03 managed-node1 sudo[55646]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:38:03 managed-node1 python3.12[55649]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Feb 08 11:38:03 managed-node1 systemd[38864]: Reload requested from client PID 55652 ('systemctl')...
Feb 08 11:38:03 managed-node1 systemd[38864]: Reloading...
Feb 08 11:38:03 managed-node1 systemd[38864]: Reloading finished in 46 ms.
Feb 08 11:38:03 managed-node1 sudo[55646]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:38:03 managed-node1 python3.12[55792]: ansible-stat Invoked with path=/home/auth_test_user1/.config/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:04 managed-node1 python3.12[56056]: ansible-file Invoked with path=/home/auth_test_user1/.config/containers/systemd/auth_test_1_quadlet.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:38:05 managed-node1 sudo[56229]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bksxuiilqaamjotvdhqucttoezjojkbx ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032685.13388-18532-173458811090601/AnsiballZ_systemd.py'
Feb 08 11:38:05 managed-node1 sudo[56229]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:38:05 managed-node1 python3.12[56232]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:38:05 managed-node1 systemd[38864]: Reload requested from client PID 56233 ('systemctl')...
Feb 08 11:38:05 managed-node1 systemd[38864]: Reloading...
Feb 08 11:38:05 managed-node1 systemd[38864]: Reloading finished in 44 ms.
Feb 08 11:38:05 managed-node1 sudo[56229]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:38:05 managed-node1 sudo[56415]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ocdzmsjejvmkfbhihookklcplcstjpfk ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032685.7974143-18553-204105367687523/AnsiballZ_command.py'
Feb 08 11:38:05 managed-node1 sudo[56415]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:38:06 managed-node1 systemd[38864]: Started podman-56419.scope.
Feb 08 11:38:06 managed-node1 sudo[56415]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:38:07 managed-node1 python3.12[56556]: ansible-stat Invoked with path=/run/user/2001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:08 managed-node1 sudo[56731]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gqtsjyctmhyuuvuvhllccljeziocofql ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032688.1015944-18668-162374725250699/AnsiballZ_podman_container_info.py'
Feb 08 11:38:08 managed-node1 sudo[56731]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:38:08 managed-node1 python3.12[56734]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None
Feb 08 11:38:08 managed-node1 systemd[38864]: Started podman-56735.scope.
Feb 08 11:38:08 managed-node1 sudo[56731]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:38:09 managed-node1 sudo[56917]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dkcwsmxjgnrtrbxomndhjurouacrvwrq ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032688.9909039-18706-114640170574929/AnsiballZ_command.py'
Feb 08 11:38:09 managed-node1 sudo[56917]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:38:09 managed-node1 python3.12[56920]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:09 managed-node1 systemd[38864]: Started podman-56921.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2327.
Feb 08 11:38:09 managed-node1 sudo[56917]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:38:09 managed-node1 sudo[57101]: root : TTY=pts/0 ; PWD=/root ; USER=auth_test_user1 ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ebkyhgcwkrgpblpxtrrhmguxwnrorgip ; XDG_RUNTIME_DIR=/run/user/2001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739032689.7054307-18744-21441391375388/AnsiballZ_command.py'
Feb 08 11:38:09 managed-node1 sudo[57101]: pam_unix(sudo:session): session opened for user auth_test_user1(uid=2001) by root(uid=0)
Feb 08 11:38:10 managed-node1 python3.12[57104]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:10 managed-node1 systemd[38864]: Started podman-57105.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2331.
Feb 08 11:38:10 managed-node1 sudo[57101]: pam_unix(sudo:session): session closed for user auth_test_user1
Feb 08 11:38:10 managed-node1 python3.12[57243]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/auth_test_user1 _raw_params=loginctl disable-linger auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None
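systemd records lingering as a flag file named after the user under /var/lib/systemd/linger, so the removes= argument seen above makes the task idempotent: loginctl disable-linger only runs while that file still exists. Roughly:

- name: Disable lingering for auth_test_user1, but only if it is enabled
  ansible.builtin.command:
    cmd: loginctl disable-linger auth_test_user1
    removes: /var/lib/systemd/linger/auth_test_user1

Disabling linger is what triggers the teardown of user@2001.service that follows.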
Feb 08 11:38:10 managed-node1 systemd[38864]: Activating special unit exit.target...
Feb 08 11:38:10 managed-node1 systemd[1]: Stopping user@2001.service - User Manager for UID 2001...
░░ Subject: A stop job for unit user@2001.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@2001.service has begun execution.
░░
░░ The job identifier is 6764.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopping podman-pause-e6744f9a.scope...
░░ Subject: A stop job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has begun execution.
░░
░░ The job identifier is 2346.
Feb 08 11:38:10 managed-node1 systemd[38864]: Removed slice app-podman\x2dkube.slice - Slice /app/podman-kube.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2348 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: app-podman\x2dkube.slice: Consumed 31.717s CPU time, 66M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit UNIT completed and consumed the indicated resources.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped target default.target - Main User Target.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2340 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped target basic.target - Basic System.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2351 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped target paths.target - Paths.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2343 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped target sockets.target - Sockets.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2352 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped target timers.target - Timers.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2350 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2344 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2355 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopping dbus-broker.service - D-Bus User Message Bus...
░░ Subject: A stop job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has begun execution.
░░
░░ The job identifier is 2342.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2349 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped podman-pause-e6744f9a.scope.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2346 and the job result is done.
Feb 08 11:38:10 managed-node1 dbus-broker[39071]: Dispatched 16771 messages @ 5(±11)μs / message.
░░ Subject: Dispatched 16771 messages
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ This message is printed by dbus-broker when shutting down. It includes metric
░░ information collected during the runtime of dbus-broker.
░░
░░ The message lists the number of dispatched messages
░░ (in this case 16771) as well as the mean time to
░░ handling a single message. The time measurements exclude the time spent on
░░ writing to and reading from the kernel.
Feb 08 11:38:10 managed-node1 systemd[38864]: Stopped dbus-broker.service - D-Bus User Message Bus.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2342 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Removed slice session.slice - User Core Session Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2354 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Removed slice user.slice - Slice /user.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2345 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: user.slice: Consumed 6.637s CPU time, 64.6M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit UNIT completed and consumed the indicated resources.
Feb 08 11:38:10 managed-node1 systemd[38864]: Closed dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2341 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: Removed slice app.slice - User Application Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 2347 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[38864]: app.slice: Consumed 31.743s CPU time, 66.6M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit UNIT completed and consumed the indicated resources.
Feb 08 11:38:10 managed-node1 systemd[38864]: Reached target shutdown.target - Shutdown.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2338.
Feb 08 11:38:10 managed-node1 systemd[38864]: Finished systemd-exit.service - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2336.
Feb 08 11:38:10 managed-node1 systemd[38864]: Reached target exit.target - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2335.
Feb 08 11:38:10 managed-node1 systemd[1]: user@2001.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user@2001.service has successfully entered the 'dead' state.
Feb 08 11:38:10 managed-node1 systemd[1]: Stopped user@2001.service - User Manager for UID 2001.
░░ Subject: A stop job for unit user@2001.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@2001.service has finished.
░░
░░ The job identifier is 6764 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[1]: user@2001.service: Consumed 41.268s CPU time, 87.7M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user@2001.service completed and consumed the indicated resources.
Feb 08 11:38:10 managed-node1 systemd[1]: Stopping user-runtime-dir@2001.service - User Runtime Directory /run/user/2001...
░░ Subject: A stop job for unit user-runtime-dir@2001.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@2001.service has begun execution.
░░
░░ The job identifier is 6763.
Feb 08 11:38:10 managed-node1 systemd[1]: run-user-2001.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-user-2001.mount has successfully entered the 'dead' state.
Feb 08 11:38:10 managed-node1 systemd[1]: user-runtime-dir@2001.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-runtime-dir@2001.service has successfully entered the 'dead' state.
Feb 08 11:38:10 managed-node1 systemd[4425]: Created slice background.slice - User Background Tasks Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 14.
Feb 08 11:38:10 managed-node1 systemd[1]: Stopped user-runtime-dir@2001.service - User Runtime Directory /run/user/2001.
░░ Subject: A stop job for unit user-runtime-dir@2001.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@2001.service has finished.
░░
░░ The job identifier is 6763 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[4425]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 13.
Feb 08 11:38:10 managed-node1 systemd[1]: Removed slice user-2001.slice - User Slice of UID 2001.
░░ Subject: A stop job for unit user-2001.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-2001.slice has finished.
░░
░░ The job identifier is 6765 and the job result is done.
Feb 08 11:38:10 managed-node1 systemd[1]: user-2001.slice: Consumed 41.293s CPU time, 87.8M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-2001.slice completed and consumed the indicated resources.
Feb 08 11:38:10 managed-node1 systemd-logind[662]: Removed session 7.
░░ Subject: Session 7 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A session with the ID 7 has been terminated.
Feb 08 11:38:10 managed-node1 systemd[4425]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 13.
Feb 08 11:38:10 managed-node1 python3.12[57382]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
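After disabling linger, the test polls loginctl for the user's State property until the session manager is really gone. A sketch of such a wait loop (the retry condition and counts are assumptions, not taken from this run):

- name: Wait for auth_test_user1's logind state to clear
  ansible.builtin.command:
    cmd: loginctl show-user --value -p State auth_test_user1
  register: __user_state     # hypothetical name
  changed_when: false
  failed_when: false
  until: __user_state.stdout != "active"   # assumed exit condition
  retries: 12
  delay: 5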
Feb 08 11:38:11 managed-node1 python3.12[57514]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Feb 08 11:38:11 managed-node1 systemd[1]: Stopping systemd-logind.service - User Login Management...
░░ Subject: A stop job for unit systemd-logind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit systemd-logind.service has begun execution.
░░
░░ The job identifier is 6767.
Feb 08 11:38:11 managed-node1 systemd[1]: systemd-logind.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit systemd-logind.service has successfully entered the 'dead' state.
Feb 08 11:38:11 managed-node1 systemd[1]: Stopped systemd-logind.service - User Login Management.
░░ Subject: A stop job for unit systemd-logind.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit systemd-logind.service has finished.
░░
░░ The job identifier is 6767 and the job result is done.
Feb 08 11:38:11 managed-node1 python3.12[57660]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:11 managed-node1 systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm...
░░ Subject: A start job for unit modprobe@drm.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit modprobe@drm.service has begun execution.
░░
░░ The job identifier is 6848.
Feb 08 11:38:11 managed-node1 systemd[1]: modprobe@drm.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit modprobe@drm.service has successfully entered the 'dead' state.
Feb 08 11:38:11 managed-node1 systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm.
░░ Subject: A start job for unit modprobe@drm.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit modprobe@drm.service has finished successfully.
░░
░░ The job identifier is 6848.
Feb 08 11:38:11 managed-node1 systemd[1]: Starting systemd-logind.service - User Login Management...
░░ Subject: A start job for unit systemd-logind.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-logind.service has begun execution.
░░
░░ The job identifier is 6768.
Feb 08 11:38:11 managed-node1 systemd-logind[57664]: New seat seat0.
░░ Subject: A new seat seat0 is now available
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new seat seat0 has been configured and is now available.
Feb 08 11:38:11 managed-node1 systemd-logind[57664]: Watching system buttons on /dev/input/event0 (Power Button)
Feb 08 11:38:11 managed-node1 systemd-logind[57664]: Watching system buttons on /dev/input/event1 (Sleep Button)
Feb 08 11:38:11 managed-node1 systemd-logind[57664]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard)
Feb 08 11:38:12 managed-node1 systemd[1]: Started systemd-logind.service - User Login Management.
░░ Subject: A start job for unit systemd-logind.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-logind.service has finished successfully.
░░
░░ The job identifier is 6768.
Feb 08 11:38:12 managed-node1 python3.12[57801]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:13 managed-node1 python3.12[57934]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:13 managed-node1 python3.12[58066]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
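The getsubids calls verify the user's subordinate UID and GID ranges, which rootless podman needs to set up user namespaces; plain getsubids reads the user ranges (from /etc/subuid or SSSD) and -g the group ranges. As tasks:

- name: Check subordinate UID ranges for auth_test_user1
  ansible.builtin.command:
    cmd: getsubids auth_test_user1
  changed_when: false

- name: Check subordinate GID ranges for auth_test_user1
  ansible.builtin.command:
    cmd: getsubids -g auth_test_user1
  changed_when: false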
Feb 08 11:38:16 managed-node1 python3.12[58460]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:16 managed-node1 python3.12[58593]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:17 managed-node1 python3.12[58725]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g auth_test_user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:20 managed-node1 python3.12[59512]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Feb 08 11:38:20 managed-node1 userdel[59514]: delete user 'auth_test_user1'
Feb 08 11:38:20 managed-node1 userdel[59514]: removed group 'auth_test_user1' owned by 'auth_test_user1'
Feb 08 11:38:20 managed-node1 userdel[59514]: removed shadow group 'auth_test_user1' owned by 'auth_test_user1'
Feb 08 11:38:20 managed-node1 python3.12[59645]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
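Note that the user module was invoked with remove=False, so userdel leaves the home directory behind and a separate file task deletes it. The cleanup pair amounts to:

- name: Remove the test user (home left in place by remove=False)
  ansible.builtin.user:
    name: auth_test_user1
    state: absent

- name: Delete the leftover home directory
  ansible.builtin.file:
    path: /home/auth_test_user1
    state: absent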
Feb 08 11:38:21 managed-node1 python3.12[59776]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type "volume"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
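This inspect call uses a Go template to pull out only the names of volume-type mounts on the registry container. Inside an Ansible task the Go braces would collide with Jinja2 templating, so the string needs the !unsafe tag (or a {% raw %} block); for example:

- name: List named volumes mounted into podman_registry
  ansible.builtin.command:
    cmd: !unsafe >-
      podman inspect podman_registry --format
      '{{range .}}{{range .Mounts}}{{if eq .Type "volume"}}{{.Name}}{{end}}{{end}}{{end}}'
  register: __registry_volumes  # hypothetical name
  changed_when: false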
Feb 08 11:38:22 managed-node1 systemd[1]: Stopping session-3.scope - Session 3 of User root...
░░ Subject: A stop job for unit session-3.scope has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-3.scope has begun execution.
░░
░░ The job identifier is 6931.
Feb 08 11:38:22 managed-node1 sshd-session[4417]: error: mm_reap: preauth child terminated by signal 15
Feb 08 11:38:22 managed-node1 sshd-session[6608]: error: mm_reap: preauth child terminated by signal 15
Feb 08 11:38:22 managed-node1 systemd[1]: Stopping session-6.scope - Session 6 of User root...
░░ Subject: A stop job for unit session-6.scope has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-6.scope has begun execution.
░░
░░ The job identifier is 6930.
Feb 08 11:38:22 managed-node1 sshd-session[4417]: pam_systemd(sshd:session): Failed to release session: No session '3' known
Feb 08 11:38:22 managed-node1 sshd-session[6608]: pam_systemd(sshd:session): Failed to release session: No session '6' known
Feb 08 11:38:22 managed-node1 sshd-session[4417]: pam_unix(sshd:session): session closed for user root
Feb 08 11:38:22 managed-node1 sshd-session[6608]: pam_unix(sshd:session): session closed for user root
Feb 08 11:38:22 managed-node1 systemd[1]: session-3.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-3.scope has successfully entered the 'dead' state.
Feb 08 11:38:22 managed-node1 systemd[1]: Stopped session-3.scope - Session 3 of User root.
░░ Subject: A stop job for unit session-3.scope has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-3.scope has finished.
░░
░░ The job identifier is 6931 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[1]: session-3.scope: Consumed 3.126s CPU time, 86.7M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-3.scope completed and consumed the indicated resources.
Feb 08 11:38:22 managed-node1 systemd[1]: session-6.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-6.scope has successfully entered the 'dead' state.
Feb 08 11:38:22 managed-node1 systemd[1]: Stopped session-6.scope - Session 6 of User root.
░░ Subject: A stop job for unit session-6.scope has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit session-6.scope has finished.
░░
░░ The job identifier is 6930 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[1]: session-6.scope: Consumed 2min 30.088s CPU time, 435.4M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-6.scope completed and consumed the indicated resources.
Feb 08 11:38:22 managed-node1 systemd[1]: Stopping user@0.service - User Manager for UID 0...
░░ Subject: A stop job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@0.service has begun execution.
░░
░░ The job identifier is 6932.
Feb 08 11:38:22 managed-node1 systemd[4425]: Activating special unit exit.target...
Feb 08 11:38:22 managed-node1 systemd[4425]: Removed slice background.slice - User Background Tasks Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 20 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Stopped target default.target - Main User Target.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 31 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Stopped target basic.target - Basic System.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 30 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Stopped target paths.target - Paths.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 28 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Stopped target sockets.target - Sockets.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 29 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Stopped target timers.target - Timers.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 32 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 27 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Closed dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 25 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 23 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Removed slice app.slice - User Application Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 22 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[4425]: Reached target shutdown.target - Shutdown.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 19.
Feb 08 11:38:22 managed-node1 systemd[4425]: Finished systemd-exit.service - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 18.
Feb 08 11:38:22 managed-node1 systemd[4425]: Reached target exit.target - Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 17.
Feb 08 11:38:22 managed-node1 systemd[1]: user@0.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user@0.service has successfully entered the 'dead' state.
Feb 08 11:38:22 managed-node1 systemd[1]: Stopped user@0.service - User Manager for UID 0.
░░ Subject: A stop job for unit user@0.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@0.service has finished.
░░
░░ The job identifier is 6932 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[1]: Stopping user-runtime-dir@0.service - User Runtime Directory /run/user/0...
░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@0.service has begun execution.
░░
░░ The job identifier is 6929.
Feb 08 11:38:22 managed-node1 systemd[1]: run-user-0.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-user-0.mount has successfully entered the 'dead' state.
Feb 08 11:38:22 managed-node1 systemd[1]: user-runtime-dir@0.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state.
Feb 08 11:38:22 managed-node1 systemd[1]: Stopped user-runtime-dir@0.service - User Runtime Directory /run/user/0.
░░ Subject: A stop job for unit user-runtime-dir@0.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@0.service has finished.
░░
░░ The job identifier is 6929 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[1]: Removed slice user-0.slice - User Slice of UID 0.
░░ Subject: A stop job for unit user-0.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-0.slice has finished.
░░
░░ The job identifier is 6933 and the job result is done.
Feb 08 11:38:22 managed-node1 systemd[1]: user-0.slice: Consumed 2min 33.561s CPU time, 500.7M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-0.slice completed and consumed the indicated resources.
Feb 08 11:38:22 managed-node1 sshd-session[59894]: Accepted publickey for root from 10.31.9.192 port 44136 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Feb 08 11:38:22 managed-node1 systemd[1]: Created slice user-0.slice - User Slice of UID 0.
░░ Subject: A start job for unit user-0.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-0.slice has finished successfully.
░░
░░ The job identifier is 7013.
Feb 08 11:38:22 managed-node1 systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0...
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@0.service has begun execution.
░░
░░ The job identifier is 6935.
Feb 08 11:38:22 managed-node1 systemd-logind[57664]: New session 8 of user root.
░░ Subject: A new session 8 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 8 has been created for the user root.
░░
░░ The leading process of the session is 59894.
Feb 08 11:38:22 managed-node1 systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0.
░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@0.service has finished successfully.
░░
░░ The job identifier is 6935.
Feb 08 11:38:22 managed-node1 systemd[1]: Starting user@0.service - User Manager for UID 0...
░░ Subject: A start job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@0.service has begun execution.
░░
░░ The job identifier is 7015.
Feb 08 11:38:22 managed-node1 systemd-logind[57664]: New session 9 of user root.
░░ Subject: A new session 9 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 9 has been created for the user root.
░░
░░ The leading process of the session is 59902.
Feb 08 11:38:22 managed-node1 (systemd)[59902]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Feb 08 11:38:22 managed-node1 systemd[59902]: Queued start job for default target default.target.
Feb 08 11:38:22 managed-node1 systemd[59902]: Created slice app.slice - User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 10.
Feb 08 11:38:22 managed-node1 systemd[59902]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 7.
Feb 08 11:38:22 managed-node1 systemd[59902]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 6.
Feb 08 11:38:22 managed-node1 systemd[59902]: Reached target paths.target - Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 3.
Feb 08 11:38:22 managed-node1 systemd[59902]: Reached target timers.target - Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 5.
Feb 08 11:38:22 managed-node1 systemd[59902]: Starting dbus.socket - D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 9.
Feb 08 11:38:22 managed-node1 systemd[59902]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 12.
Feb 08 11:38:22 managed-node1 systemd[59902]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 12.
Feb 08 11:38:22 managed-node1 systemd[59902]: Listening on dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 9.
Feb 08 11:38:22 managed-node1 systemd[59902]: Reached target sockets.target - Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 8.
Feb 08 11:38:22 managed-node1 systemd[59902]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2.
Feb 08 11:38:22 managed-node1 systemd[59902]: Reached target default.target - Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1.
Feb 08 11:38:22 managed-node1 systemd[1]: Started user@0.service - User Manager for UID 0.
░░ Subject: A start job for unit user@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@0.service has finished successfully.
░░
░░ The job identifier is 7015.
Feb 08 11:38:22 managed-node1 systemd[59902]: Startup finished in 101ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The user manager instance for user 0 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░
░░ Startup of the manager took 101677 microseconds.
Feb 08 11:38:22 managed-node1 systemd[1]: Started session-8.scope - Session 8 of User root.
░░ Subject: A start job for unit session-8.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-8.scope has finished successfully.
░░
░░ The job identifier is 7096.
Feb 08 11:38:22 managed-node1 sshd-session[59894]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Feb 08 11:38:25 managed-node1 python3.12[60092]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Feb 08 11:38:27 managed-node1 python3.12[60261]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:27 managed-node1 python3.12[60392]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Feb 08 11:38:28 managed-node1 python3.12[60524]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Feb 08 11:38:29 managed-node1 python3.12[60656]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:38:29 managed-node1 python3.12[60787]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:38:30 managed-node1 python3.12[60918]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Feb 08 11:38:31 managed-node1 python3.12[61051]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
# Ansible managed
#
# system_role:certificate
provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
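certificate_request is the internal module of the fedora.linux_system_roles.certificate role; the public interface that produces this invocation is the certificate_requests variable. Roughly:

- name: Request a self-signed certificate for the demo
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.certificate
  vars:
    certificate_requests:
      - name: quadlet_demo
        dns: [localhost]
        ca: self-sign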
Feb 08 11:38:31 managed-node1 certmonger[10262]: 2025-02-08 11:38:31 [10262] Wrote to /var/lib/certmonger/requests/20250208163831
Feb 08 11:38:31 managed-node1 certmonger[61066]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
Feb 08 11:38:31 managed-node1 certmonger[10262]: 2025-02-08 11:38:31 [10262] Wrote to /var/lib/certmonger/requests/20250208163831
Feb 08 11:38:32 managed-node1 python3.12[61197]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Feb 08 11:38:32 managed-node1 python3.12[61328]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Feb 08 11:38:32 managed-node1 python3.12[61459]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Feb 08 11:38:33 managed-node1 python3.12[61590]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:33 managed-node1 certmonger[10262]: 2025-02-08 11:38:33 [10262] Wrote to /var/lib/certmonger/requests/20250208163831
Feb 08 11:38:33 managed-node1 python3.12[61722]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:38:34 managed-node1 python3.12[61853]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:38:34 managed-node1 python3.12[61984]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
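Cleanup mirrors issuance: getcert stop-tracking tells certmonger to forget the request (hence the final write to its requests directory above), and the generated certificate and key are then removed. Condensed:

- name: Stop certmonger from tracking the demo certificate
  ansible.builtin.command:
    cmd: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt

- name: Remove the issued certificate and key
  ansible.builtin.file:
    path: "{{ item }}"
    state: absent
  loop:
    - /etc/pki/tls/certs/quadlet_demo.crt
    - /etc/pki/tls/private/quadlet_demo.key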
Feb 08 11:38:35 managed-node1 python3.12[62115]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:35 managed-node1 python3.12[62246]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:37 managed-node1 python3.12[62508]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:38:39 managed-node1 python3.12[62645]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Feb 08 11:38:39 managed-node1 python3.12[62777]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:42 managed-node1 python3.12[62910]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:43 managed-node1 python3.12[63041]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:43 managed-node1 python3.12[63172]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Feb 08 11:38:45 managed-node1 python3.12[63304]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Feb 08 11:38:45 managed-node1 python3.12[63437]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Feb 08 11:38:45 managed-node1 systemd[1]: Reload requested from client PID 63440 ('systemctl') (unit session-8.scope)...
Feb 08 11:38:45 managed-node1 systemd[1]: Reloading...
Feb 08 11:38:46 managed-node1 systemd-rc-local-generator[63486]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:38:46 managed-node1 systemd[1]: Reloading finished in 203 ms.
Feb 08 11:38:46 managed-node1 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon...
░░ Subject: A start job for unit firewalld.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit firewalld.service has begun execution.
░░
░░ The job identifier is 7178.
Feb 08 11:38:46 managed-node1 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon.
░░ Subject: A start job for unit firewalld.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit firewalld.service has finished successfully.
░░
░░ The job identifier is 7178.
Feb 08 11:38:46 managed-node1 kernel: Warning: Unmaintained driver is detected: ip_set
Feb 08 11:38:46 managed-node1 systemd[1]: Starting polkit.service - Authorization Manager...
░░ Subject: A start job for unit polkit.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit polkit.service has begun execution.
░░
░░ The job identifier is 7261.
Feb 08 11:38:46 managed-node1 polkitd[63589]: Started polkitd version 125
Feb 08 11:38:46 managed-node1 systemd[1]: Started polkit.service - Authorization Manager.
░░ Subject: A start job for unit polkit.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit polkit.service has finished successfully.
░░
░░ The job identifier is 7261.
Feb 08 11:38:47 managed-node1 python3.12[63662]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Feb 08 11:38:47 managed-node1 python3.12[63793]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
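Both firewall_lib invocations come from the fedora.linux_system_roles.firewall role opening the demo's ports, permanently and in the running configuration. The driving variable looks roughly like:

- name: Open ports 8000/tcp and 9000/tcp
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 8000/tcp
        permanent: true
        runtime: true
        state: enabled
      - port: 9000/tcp
        permanent: true
        runtime: true
        state: enabled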
Feb 08 11:38:47 managed-node1 rsyslogd[894]: imjournal: journal files changed, reloading... [v8.2412.0-1.el10 try https://www.rsyslog.com/e/0 ]
Feb 08 11:38:53 managed-node1 python3.12[64400]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:38:55 managed-node1 python3.12[64533]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:38:55 managed-node1 python3.12[64664]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 08 11:38:55 managed-node1 python3.12[64769]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739032735.236063-21095-81303015722943/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
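The copied quadlet-demo.network file itself is not logged (only its checksum), but its effect is visible below: with no NetworkName= set, quadlet derives the network name systemd-quadlet-demo from the unit name. A minimal sketch of installing such a unit, with assumed contents:

- name: Install the network quadlet (file contents are an assumption)
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo.network
    owner: root
    group: "0"
    mode: "0644"
    content: |
      [Network]
      # No NetworkName= given, so quadlet will create the network
      # as systemd-quadlet-demo on first start of the generated
      # quadlet-demo-network.service.

After the daemon-reload below, the quadlet generator turns this file into quadlet-demo-network.service; starting that one-shot service runs podman network create, which prints the network name seen in the journal.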
Feb 08 11:38:56 managed-node1 python3.12[64900]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:38:56 managed-node1 systemd[1]: Reload requested from client PID 64901 ('systemctl') (unit session-8.scope)...
Feb 08 11:38:56 managed-node1 systemd[1]: Reloading...
Feb 08 11:38:56 managed-node1 systemd-rc-local-generator[64947]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:38:56 managed-node1 systemd[1]: Reloading finished in 205 ms.
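Note: each daemon_reload=True task makes systemd re-run its generators, and podman's generator turns the files under /etc/containers/systemd/ into transient service units (which is why quadlet-demo-network.service exists below without any .service file on disk). To preview what quadlet would generate, podman-systemd.unit(5) documents a dry-run mode; the generator path below is the usual Fedora/RHEL location, stated here as an assumption:

    /usr/lib/systemd/system-generators/podman-system-generator --dryrun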
Feb 08 11:38:57 managed-node1 python3.12[65084]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Feb 08 11:38:57 managed-node1 systemd[1]: Starting quadlet-demo-network.service...
░░ Subject: A start job for unit quadlet-demo-network.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has begun execution.
░░
░░ The job identifier is 7340.
Feb 08 11:38:57 managed-node1 quadlet-demo-network[65088]: systemd-quadlet-demo
Feb 08 11:38:57 managed-node1 systemd[1]: Finished quadlet-demo-network.service.
░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-network.service has finished successfully.
░░
░░ The job identifier is 7340.
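Note: the unit echoes the name of the network it created, systemd-quadlet-demo, which is quadlet's default of "systemd-" plus the unit basename. The file copied to /etc/containers/systemd/quadlet-demo.network above is a quadlet .network unit; a minimal sketch consistent with that output (the contents are assumed, not taken from the test):

    cat > /etc/containers/systemd/quadlet-demo.network <<'EOF'
    [Network]
    # Explicit name matching the "systemd-quadlet-demo" printed above;
    # omitting it would yield the same name by default.
    NetworkName=systemd-quadlet-demo
    EOF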
Feb 08 11:38:58 managed-node1 python3.12[65227]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:39:00 managed-node1 python3.12[65360]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:00 managed-node1 python3.12[65491]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 08 11:39:01 managed-node1 python3.12[65596]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739032740.7139578-21333-262300184216330/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:02 managed-node1 python3.12[65727]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:39:02 managed-node1 systemd[1]: Reload requested from client PID 65728 ('systemctl') (unit session-8.scope)...
Feb 08 11:39:02 managed-node1 systemd[1]: Reloading...
Feb 08 11:39:02 managed-node1 systemd-rc-local-generator[65771]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:39:02 managed-node1 systemd[1]: Reloading finished in 201 ms.
Feb 08 11:39:02 managed-node1 python3.12[65911]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Feb 08 11:39:02 managed-node1 systemd[1]: Starting quadlet-demo-mysql-volume.service...
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution.
░░
░░ The job identifier is 7424.
Feb 08 11:39:02 managed-node1 podman[65915]: 2025-02-08 11:39:02.835786103 -0500 EST m=+0.025088541 volume create systemd-quadlet-demo-mysql
Feb 08 11:39:02 managed-node1 quadlet-demo-mysql-volume[65915]: systemd-quadlet-demo-mysql
Feb 08 11:39:02 managed-node1 systemd[1]: Finished quadlet-demo-mysql-volume.service.
░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully.
░░
░░ The job identifier is 7424.
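Note: the "volume create systemd-quadlet-demo-mysql" event above corresponds to the quadlet .volume unit copied a few lines earlier. A minimal sketch of such a file (contents assumed; the default name, "systemd-" plus the unit basename, already matches what the log shows):

    cat > /etc/containers/systemd/quadlet-demo-mysql.volume <<'EOF'
    [Volume]
    VolumeName=systemd-quadlet-demo-mysql
    EOF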
Feb 08 11:39:03 managed-node1 python3.12[66053]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:39:05 managed-node1 python3.12[66186]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:11 managed-node1 podman[66325]: 2025-02-08 11:39:11.459257349 -0500 EST m=+5.659315206 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Feb 08 11:39:11 managed-node1 python3.12[66637]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:12 managed-node1 python3.12[66768]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 08 11:39:12 managed-node1 python3.12[66873]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1739032752.035067-21647-78977375058148/.source.container _original_basename=.r271y9tw follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:13 managed-node1 python3.12[67004]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:39:13 managed-node1 systemd[1]: Reload requested from client PID 67005 ('systemctl') (unit session-8.scope)...
Feb 08 11:39:13 managed-node1 systemd[1]: Reloading...
Feb 08 11:39:13 managed-node1 systemd-rc-local-generator[67048]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:39:13 managed-node1 systemd[1]: Reloading finished in 208 ms.
Feb 08 11:39:13 managed-node1 python3.12[67188]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Feb 08 11:39:13 managed-node1 systemd[1]: Starting quadlet-demo-mysql.service...
░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 7508.
Feb 08 11:39:13 managed-node1 podman[67192]: 2025-02-08 11:39:13.99119408 -0500 EST m=+0.042540131 container create 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Feb 08 11:39:14 managed-node1 kernel: podman2: port 1(veth1) entered blocking state
Feb 08 11:39:14 managed-node1 kernel: podman2: port 1(veth1) entered disabled state
Feb 08 11:39:14 managed-node1 kernel: veth1: entered allmulticast mode
Feb 08 11:39:14 managed-node1 kernel: veth1: entered promiscuous mode
Feb 08 11:39:14 managed-node1 kernel: podman2: port 1(veth1) entered blocking state
Feb 08 11:39:14 managed-node1 kernel: podman2: port 1(veth1) entered forwarding state
Feb 08 11:39:14 managed-node1 (udev-worker)[67204]: Network interface NamePolicy= disabled on kernel command line.
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0187] device (podman2): carrier: link connected
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0201] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9)
Feb 08 11:39:14 managed-node1 (udev-worker)[67203]: Network interface NamePolicy= disabled on kernel command line.
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0222] device (veth1): carrier: link connected
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0235] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/10)
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0409] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0420] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0441] device (podman2): Activation: starting connection 'podman2' (d5a995c4-b6f4-4f37-bf76-fa260a125ff8)
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0445] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0450] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external')
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0454] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external')
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0459] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Feb 08 11:39:14 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 7594.
Feb 08 11:39:14 managed-node1 podman[67192]: 2025-02-08 11:39:13.974333823 -0500 EST m=+0.025679967 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Feb 08 11:39:14 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 7594.
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0908] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0911] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Feb 08 11:39:14 managed-node1 NetworkManager[723]: [1739032754.0919] device (podman2): Activation: successful, device activated.
Feb 08 11:39:14 managed-node1 systemd[1]: Started run-p67241-i67541.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.
░░ Subject: A start job for unit run-p67241-i67541.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-p67241-i67541.scope has finished successfully.
░░
░░ The job identifier is 7673.
Feb 08 11:39:14 managed-node1 systemd[1]: Started 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer - [systemd-run] /usr/bin/podman healthcheck run 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f.
░░ Subject: A start job for unit 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer has finished successfully.
░░
░░ The job identifier is 7679.
Feb 08 11:39:14 managed-node1 podman[67192]: 2025-02-08 11:39:14.181465423 -0500 EST m=+0.232811647 container init 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Feb 08 11:39:14 managed-node1 systemd[1]: Started quadlet-demo-mysql.service.
░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo-mysql.service has finished successfully.
░░
░░ The job identifier is 7508.
Feb 08 11:39:14 managed-node1 podman[67192]: 2025-02-08 11:39:14.226893808 -0500 EST m=+0.278240383 container start 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Feb 08 11:39:14 managed-node1 quadlet-demo-mysql[67192]: 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f
Feb 08 11:39:14 managed-node1 podman[67257]: 2025-02-08 11:39:14.396058928 -0500 EST m=+0.147062872 container health_status 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
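Note: the sequence above (container create from quay.io/linux-system-roles/mysql:5.6, veth1 joining the podman2 bridge, a per-container healthcheck timer, then health_status=healthy) is what quadlet produces from a .container unit. A sketch consistent with those events; the HealthCmd and mount path are assumptions, everything else is visible in the log:

    cat > /etc/containers/systemd/quadlet-demo-mysql.container <<'EOF'
    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    # Join the network and volume created by the earlier quadlet units
    Network=quadlet-demo.network
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    # A HealthCmd must be set for systemd to start the healthcheck
    # timer seen above; the actual command is an assumption here
    HealthCmd=/bin/true

    [Install]
    WantedBy=multi-user.target
    EOF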
Feb 08 11:39:15 managed-node1 python3.12[67450]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:39:17 managed-node1 python3.12[67618]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:18 managed-node1 python3.12[67749]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 08 11:39:18 managed-node1 python3.12[67854]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739032757.942965-21894-226207580527898/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
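Note: envoy-proxy-configmap.yml is not itself a quadlet unit (quadlet only processes recognized extensions such as .container, .volume, .network, and .kube), so the reload below generates nothing from it; it is Kubernetes YAML that a .kube unit can reference via ConfigMap=. A structural sketch only, with all field values assumed:

    cat > /etc/containers/systemd/envoy-proxy-configmap.yml <<'EOF'
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: envoy-proxy-config
    data:
      envoy.yaml: |
        # envoy bootstrap configuration goes here
    EOF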
Feb 08 11:39:19 managed-node1 python3.12[67985]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:39:19 managed-node1 systemd[1]: Reload requested from client PID 67986 ('systemctl') (unit session-8.scope)...
Feb 08 11:39:19 managed-node1 systemd[1]: Reloading...
Feb 08 11:39:19 managed-node1 systemd-rc-local-generator[68037]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:39:19 managed-node1 systemd[1]: Reloading finished in 395 ms.
Feb 08 11:39:20 managed-node1 python3.12[68207]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:39:21 managed-node1 python3.12[68347]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:22 managed-node1 python3.12[68478]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 08 11:39:22 managed-node1 python3.12[68583]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1739032762.141144-22084-275772725018437/.source.yml _original_basename=.e286fqxx follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:23 managed-node1 python3.12[68714]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:39:23 managed-node1 systemd[1]: Reload requested from client PID 68715 ('systemctl') (unit session-8.scope)...
Feb 08 11:39:23 managed-node1 systemd[1]: Reloading...
Feb 08 11:39:23 managed-node1 systemd-rc-local-generator[68761]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:39:23 managed-node1 systemd[1]: Reloading finished in 215 ms.
Feb 08 11:39:24 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Feb 08 11:39:24 managed-node1 python3.12[68924]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:39:25 managed-node1 python3.12[69057]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Feb 08 11:39:26 managed-node1 python3.12[69188]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:26 managed-node1 python3.12[69319]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:37 managed-node1 podman[69458]: 2025-02-08 11:39:37.754681866 -0500 EST m=+10.841737523 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Feb 08 11:39:42 managed-node1 podman[69878]: 2025-02-08 11:39:42.323570477 -0500 EST m=+4.109100126 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Feb 08 11:39:42 managed-node1 python3.12[70140]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:39:43 managed-node1 python3.12[70271]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 08 11:39:43 managed-node1 python3.12[70376]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739032782.9002678-22772-261467899926975/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
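Note: quadlet-demo.kube is the unit that ties the pieces together: starting it below runs `podman kube play` on a Kubernetes YAML, creating the quadlet-demo pod with the wordpress and envoy containers, the wp-pv-claim volume, and the infra/pause containers seen in the events that follow. A sketch of such a file; Yaml=, ConfigMap=, Network=, and PublishPort= are real [Kube] keys, but the referenced file names are inferred from this log and the port mappings are assumptions based on the 8000/9000 firewall openings:

    cat > /etc/containers/systemd/quadlet-demo.kube <<'EOF'
    [Kube]
    # Kubernetes YAML describing the wordpress + envoy pod
    Yaml=quadlet-demo.yml
    ConfigMap=envoy-proxy-configmap.yml
    Network=quadlet-demo.network
    # Host:container mappings assumed, not shown in the log
    PublishPort=8000:8080
    PublishPort=9000:9901
    EOF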
Feb 08 11:39:44 managed-node1 python3.12[70507]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:39:44 managed-node1 systemd[1]: Reload requested from client PID 70508 ('systemctl') (unit session-8.scope)...
Feb 08 11:39:44 managed-node1 systemd[1]: Reloading...
Feb 08 11:39:44 managed-node1 systemd-rc-local-generator[70551]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:39:44 managed-node1 systemd[1]: Reloading finished in 221 ms.
Feb 08 11:39:44 managed-node1 podman[70669]: 2025-02-08 11:39:44.743370669 -0500 EST m=+0.116604126 container health_status 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Feb 08 11:39:44 managed-node1 python3.12[70698]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Feb 08 11:39:44 managed-node1 systemd[1]: Starting quadlet-demo.service...
░░ Subject: A start job for unit quadlet-demo.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo.service has begun execution.
░░
░░ The job identifier is 7913.
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: Pods stopped:
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: Pods removed:
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: Secrets removed:
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: Volumes removed:
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.011971676 -0500 EST m=+0.027535813 volume create wp-pv-claim
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.03103197 -0500 EST m=+0.046596193 container create ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.037053996 -0500 EST m=+0.052618133 volume create envoy-proxy-config
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.042326579 -0500 EST m=+0.057890715 volume create envoy-certificates
Feb 08 11:39:45 managed-node1 systemd[1]: Created slice machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice - cgroup machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice.
░░ Subject: A start job for unit machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice has finished successfully.
░░
░░ The job identifier is 8000.
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.08655493 -0500 EST m=+0.102119073 container create d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead (image=localhost/podman-pause:5.3.1-1733097600, name=5bb5b766f277-infra, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.09283821 -0500 EST m=+0.108402346 pod create 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9 (image=, name=quadlet-demo)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.101895515 -0500 EST m=+0.117459784 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.133028632 -0500 EST m=+0.148592849 container create b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.161622762 -0500 EST m=+0.177186983 container create 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.162031533 -0500 EST m=+0.177595685 container restart ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 systemd[1]: Started libpod-ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39.scope - libcrun container.
░░ Subject: A start job for unit libpod-ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39.scope has finished successfully.
░░
░░ The job identifier is 8006.
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.137020932 -0500 EST m=+0.152585161 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.204320946 -0500 EST m=+0.219885129 container init ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.207197152 -0500 EST m=+0.222761378 container start ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 kernel: podman2: port 2(veth2) entered blocking state
Feb 08 11:39:45 managed-node1 kernel: podman2: port 2(veth2) entered disabled state
Feb 08 11:39:45 managed-node1 kernel: veth2: entered allmulticast mode
Feb 08 11:39:45 managed-node1 kernel: veth2: entered promiscuous mode
Feb 08 11:39:45 managed-node1 kernel: podman2: port 2(veth2) entered blocking state
Feb 08 11:39:45 managed-node1 kernel: podman2: port 2(veth2) entered forwarding state
Feb 08 11:39:45 managed-node1 NetworkManager[723]: [1739032785.2331] device (veth2): carrier: link connected
Feb 08 11:39:45 managed-node1 NetworkManager[723]: [1739032785.2375] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/11)
Feb 08 11:39:45 managed-node1 (udev-worker)[70727]: Network interface NamePolicy= disabled on kernel command line.
Feb 08 11:39:45 managed-node1 systemd[1]: Started libpod-d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead.scope - libcrun container.
░░ Subject: A start job for unit libpod-d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead.scope has finished successfully.
░░
░░ The job identifier is 8012.
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.316600664 -0500 EST m=+0.332164872 container init d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead (image=localhost/podman-pause:5.3.1-1733097600, name=5bb5b766f277-infra, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.319313859 -0500 EST m=+0.334878117 container start d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead (image=localhost/podman-pause:5.3.1-1733097600, name=5bb5b766f277-infra, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Feb 08 11:39:45 managed-node1 systemd[1]: Started libpod-b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99.scope - libcrun container.
░░ Subject: A start job for unit libpod-b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99.scope has finished successfully.
░░
░░ The job identifier is 8019.
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.371467867 -0500 EST m=+0.387032097 container init b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.374065443 -0500 EST m=+0.389629667 container start b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 systemd[1]: Started libpod-1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6.scope - libcrun container.
░░ Subject: A start job for unit libpod-1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6.scope has finished successfully.
░░
░░ The job identifier is 8026.
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.44777035 -0500 EST m=+0.463334730 container init 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.452659879 -0500 EST m=+0.468224130 container start 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:39:45 managed-node1 podman[70711]: 2025-02-08 11:39:45.460444607 -0500 EST m=+0.476008749 pod start 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9 (image=, name=quadlet-demo)
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: Volumes:
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: wp-pv-claim
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: Pod:
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: Containers:
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99
Feb 08 11:39:45 managed-node1 quadlet-demo[70711]: 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6
Feb 08 11:39:45 managed-node1 systemd[1]: Started quadlet-demo.service.
░░ Subject: A start job for unit quadlet-demo.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-demo.service has finished successfully.
░░
░░ The job identifier is 7913.
Feb 08 11:39:46 managed-node1 python3.12[70974]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:46 managed-node1 python3.12[71168]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:47 managed-node1 python3.12[71365]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:47 managed-node1 python3.12[71509]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:47 managed-node1 python3.12[71647]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:48 managed-node1 python3.12[71781]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
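Note: the get_url task above is the actual end-to-end check: fetch the app over TLS without certificate validation and save the body. A shell equivalent of the same request:

    curl -k --max-time 10 -o /run/out https://localhost:8000
    chmod 600 /run/out   # matches mode=0600 in the task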
Feb 08 11:39:49 managed-node1 python3.12[71917]: ansible-ansible.legacy.command Invoked with _raw_params=cat /run/out _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:49 managed-node1 python3.12[72049]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:50 managed-node1 python3.12[72187]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:50 managed-node1 python3.12[72327]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:51 managed-node1 python3.12[72461]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:53 managed-node1 python3.12[72724]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:39:53 managed-node1 python3.12[72862]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:39:55 managed-node1 python3.12[72995]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Feb 08 11:39:56 managed-node1 python3.12[73127]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Feb 08 11:39:57 managed-node1 python3.12[73260]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Feb 08 11:39:57 managed-node1 python3.12[73393]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Feb 08 11:39:58 managed-node1 python3.12[73524]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Feb 08 11:40:04 managed-node1 python3.12[74069]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:05 managed-node1 python3.12[74202]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Feb 08 11:40:05 managed-node1 systemd[1]: Reload requested from client PID 74205 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:05 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:05 managed-node1 systemd-rc-local-generator[74248]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:05 managed-node1 systemd[1]: Reloading finished in 230 ms.
Feb 08 11:40:05 managed-node1 systemd[1]: Stopping quadlet-demo.service...
░░ Subject: A stop job for unit quadlet-demo.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo.service has begun execution.
░░
░░ The job identifier is 8033.
Feb 08 11:40:05 managed-node1 systemd[1]: libpod-ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39.scope has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 podman[74260]: 2025-02-08 11:40:05.744515062 -0500 EST m=+0.022977143 container died ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39-userdata-shm.mount has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay-b10c27e4913a3b7081a0e6ed84c1ffbd803f8b0c22138b91f6f12391d0f9c725-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-b10c27e4913a3b7081a0e6ed84c1ffbd803f8b0c22138b91f6f12391d0f9c725-merged.mount has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 podman[74260]: 2025-02-08 11:40:05.821476725 -0500 EST m=+0.099938858 container cleanup ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:05 managed-node1 podman[74269]: 2025-02-08 11:40:05.862974424 -0500 EST m=+0.024995707 pod stop 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9 (image=, name=quadlet-demo)
Feb 08 11:40:05 managed-node1 systemd[1]: libpod-d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead.scope has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 podman[74269]: 2025-02-08 11:40:05.883674303 -0500 EST m=+0.045695888 container died d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead (image=localhost/podman-pause:5.3.1-1733097600, name=5bb5b766f277-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Feb 08 11:40:05 managed-node1 kernel: podman2: port 2(veth2) entered disabled state
Feb 08 11:40:05 managed-node1 kernel: veth2 (unregistering): left allmulticast mode
Feb 08 11:40:05 managed-node1 kernel: veth2 (unregistering): left promiscuous mode
Feb 08 11:40:05 managed-node1 kernel: podman2: port 2(veth2) entered disabled state
Feb 08 11:40:05 managed-node1 systemd[1]: libpod-1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6.scope has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 podman[74269]: 2025-02-08 11:40:05.94358757 -0500 EST m=+0.105608943 container stop 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:05 managed-node1 systemd[1]: run-netns-netns\x2d97e30ead\x2d9129\x2d6e53\x2d82c5\x2deec89aaf5532.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2d97e30ead\x2d9129\x2d6e53\x2d82c5\x2deec89aaf5532.mount has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 podman[74269]: 2025-02-08 11:40:05.965263697 -0500 EST m=+0.127284994 container died 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:05 managed-node1 systemd[1]: libpod-b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99.scope has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 systemd[1]: libpod-b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99.scope: Consumed 1.120s CPU time, 87.5M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99.scope completed and consumed the indicated resources.
Feb 08 11:40:05 managed-node1 podman[74269]: 2025-02-08 11:40:05.993533101 -0500 EST m=+0.155554394 container died b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead-userdata-shm.mount has successfully entered the 'dead' state.
Feb 08 11:40:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay-e62948ddbda5df4c52fc6e436ec3dbec01d7aa163e5d734554a934a9de913382-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-e62948ddbda5df4c52fc6e436ec3dbec01d7aa163e5d734554a934a9de913382-merged.mount has successfully entered the 'dead' state.
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.018315181 -0500 EST m=+0.180336454 container cleanup d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead (image=localhost/podman-pause:5.3.1-1733097600, name=5bb5b766f277-infra, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.041263145 -0500 EST m=+0.203284627 container cleanup b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.080748067 -0500 EST m=+0.242769350 container cleanup 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:06 managed-node1 systemd[1]: Removed slice machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice - cgroup machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice.
░░ Subject: A stop job for unit machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice has finished.
░░
░░ The job identifier is 8034 and the job result is done.
Feb 08 11:40:06 managed-node1 systemd[1]: machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice: Consumed 1.224s CPU time, 108.7M memory peak, 26M written to disk.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice completed and consumed the indicated resources.
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.089215702 -0500 EST m=+0.251237102 pod stop 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9 (image=, name=quadlet-demo)
Feb 08 11:40:06 managed-node1 systemd[1]: machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice: No such file or directory
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.095917676 -0500 EST m=+0.257938991 pod stop 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9 (image=, name=quadlet-demo)
Feb 08 11:40:06 managed-node1 systemd[1]: machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice: No such file or directory
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.124132332 -0500 EST m=+0.286153542 container remove b2c27c1495982760473860289240579ddfea0264b503275d042962f77e018b99 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.145386712 -0500 EST m=+0.307407912 container remove 1fc390fa3ff4e600423daa3102b35646f192b9f30742f52fdb009501b04686b6 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.173087486 -0500 EST m=+0.335108697 container remove d3d31a88915ce9e5717916386c2e8da88c10933d536d43a0ef95d1635b38dead (image=localhost/podman-pause:5.3.1-1733097600, name=5bb5b766f277-infra, pod_id=5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0)
Feb 08 11:40:06 managed-node1 systemd[1]: machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9.slice: No such file or directory
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.182036494 -0500 EST m=+0.344057663 pod remove 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9 (image=, name=quadlet-demo)
Feb 08 11:40:06 managed-node1 podman[74269]: 2025-02-08 11:40:06.203539098 -0500 EST m=+0.365560311 container remove ba15754f1ee0b24c01c5cc08c58e3f526c07bac193ca36fbfb746d58dfaa2e39 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service)
Feb 08 11:40:06 managed-node1 quadlet-demo[74269]: Pods stopped:
Feb 08 11:40:06 managed-node1 quadlet-demo[74269]: 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9
Feb 08 11:40:06 managed-node1 quadlet-demo[74269]: Pods removed:
Feb 08 11:40:06 managed-node1 quadlet-demo[74269]: 5bb5b766f27766d0ae568eb2a38b1f69172acaa7584f1362133b30871cf15ff9
Feb 08 11:40:06 managed-node1 quadlet-demo[74269]: Secrets removed:
Feb 08 11:40:06 managed-node1 quadlet-demo[74269]: Volumes removed:
Feb 08 11:40:06 managed-node1 systemd[1]: quadlet-demo.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo.service has successfully entered the 'dead' state.
Feb 08 11:40:06 managed-node1 systemd[1]: Stopped quadlet-demo.service.
░░ Subject: A stop job for unit quadlet-demo.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo.service has finished.
░░
░░ The job identifier is 8033 and the job result is done.
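Note: stopping quadlet-demo.service is only the first half of the role's cleanup; the file removal, daemon reload, and image prune logged below complete it. The manual equivalent of this teardown sequence, as a sketch:

    systemctl stop quadlet-demo.service
    rm /etc/containers/systemd/quadlet-demo.kube
    systemctl daemon-reload        # drops the generated quadlet-demo.service
    podman image prune --all -f    # matches the prune logged below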
Feb 08 11:40:06 managed-node1 python3.12[74445]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:06 managed-node1 systemd[1]: var-lib-containers-storage-overlay-cd91608cc1711d72176ee4423c6e7d17a81a4a68c09500cffe6170627529bab8-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-cd91608cc1711d72176ee4423c6e7d17a81a4a68c09500cffe6170627529bab8-merged.mount has successfully entered the 'dead' state.
Feb 08 11:40:06 managed-node1 systemd[1]: var-lib-containers-storage-overlay-34f90b6c28cc569498454bff650cce1e60efa692898b23e302e116726e9de5ac-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-34f90b6c28cc569498454bff650cce1e60efa692898b23e302e116726e9de5ac-merged.mount has successfully entered the 'dead' state.
Feb 08 11:40:07 managed-node1 python3.12[74709]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:40:08 managed-node1 python3.12[74840]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:40:08 managed-node1 systemd[1]: Reload requested from client PID 74841 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:08 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:08 managed-node1 systemd-rc-local-generator[74884]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:08 managed-node1 systemd[1]: Reloading finished in 213 ms.
Feb 08 11:40:08 managed-node1 python3.12[75024]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:08.95309051 -0500 EST m=+0.042445182 image untag 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f localhost:5000/libpod/testimage:20210610
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:08.95309952 -0500 EST m=+0.042454205 image untag 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:08.930697597 -0500 EST m=+0.020051952 image remove 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:08.962158863 -0500 EST m=+0.051513196 image untag f408d5f24b1df22e79ff47fd950d70bbf308134db66b52ba29e6cd81b656382a localhost/podman-pause:5.3.1-1733097600
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:08.953105552 -0500 EST m=+0.042459836 image remove f408d5f24b1df22e79ff47fd950d70bbf308134db66b52ba29e6cd81b656382a
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:09.497714524 -0500 EST m=+0.587068924 image untag fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:08.962167217 -0500 EST m=+0.051521503 image remove fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:09.64138729 -0500 EST m=+0.730741635 image untag 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0
Feb 08 11:40:09 managed-node1 podman[75025]: 2025-02-08 11:40:09.497737666 -0500 EST m=+0.587092152 image remove 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d
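The untag/remove pairs above come from `podman image prune --all -f`, which deletes every image not referenced by a container; the journal records one untag event per tag plus one remove event per image ID. The recap below names this step "Prune images no longer in use"; a plausible shape for it, based on the command logged here (the task name is taken from the recap, everything else is an assumption):

    - name: Prune images no longer in use
      ansible.builtin.command:
        cmd: podman image prune --all -f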
Feb 08 11:40:10 managed-node1 python3.12[75163]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:10 managed-node1 python3.12[75301]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:10 managed-node1 python3.12[75440]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:11 managed-node1 python3.12[75578]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
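The four read-only listings just above (`podman images -n`, `podman volume ls -n`, `podman ps --noheading`, `podman network ls -n -q`) verify that cleanup left no images, volumes, containers, or networks behind. One way such checks could be expressed, assuming a simple loop (task and register names are illustrative):

    - name: List leftover podman objects
      ansible.builtin.command:
        cmd: "{{ item }}"
      loop:
        - podman images -n
        - podman volume ls -n
        - podman ps --noheading
        - podman network ls -n -q
      register: __podman_listings
      changed_when: false  # read-only checks, never report a change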
Feb 08 11:40:12 managed-node1 python3.12[75993]: ansible-service_facts Invoked
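ansible-service_facts gathers the state of every unit on the host into ansible_facts.services; the recap below attributes these calls to the role task "For testing and debugging - services". The module takes no arguments:

    - name: For testing and debugging - services
      ansible.builtin.service_facts: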
Feb 08 11:40:15 managed-node1 podman[76211]: 2025-02-08 11:40:15.363471074 -0500 EST m=+0.072902285 container health_status 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=1, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Feb 08 11:40:15 managed-node1 systemd[1]: 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service: Main process exited, code=exited, status=125/n/a
░░ Subject: Unit process exited
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ An ExecStart= process belonging to unit 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service has exited.
░░
░░ The process' exit code is 'exited' and its exit status is 125.
Feb 08 11:40:15 managed-node1 systemd[1]: 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service: Failed with result 'exit-code'.
░░ Subject: Unit failed
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.service has entered the 'failed' state with result 'exit-code'.
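The failed unit above is the transient service that podman's healthcheck timer spawns for container 05159afa0f59... (the matching .timer is stopped at 11:40:39 below). Podman reserves exit status 125 for errors in podman itself, as opposed to 0 for a healthy and 1 for an unhealthy container, so this entry points at the `podman healthcheck run` invocation erroring rather than the check inside the container failing. The same check can be run by hand; a sketch that tolerates an unhealthy result but fails on a podman error (task name and register variable are illustrative):

    - name: Run the container healthcheck manually
      ansible.builtin.command:
        cmd: podman healthcheck run 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f
      register: __hc
      failed_when: __hc.rc not in [0, 1]  # 0 healthy, 1 unhealthy, 125 podman error
      changed_when: false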
Feb 08 11:40:15 managed-node1 python3.12[76238]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:16 managed-node1 python3.12[76375]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:17 managed-node1 python3.12[76639]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:40:18 managed-node1 python3.12[76770]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:40:18 managed-node1 systemd[1]: Reload requested from client PID 76771 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:18 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:18 managed-node1 systemd-rc-local-generator[76814]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:18 managed-node1 systemd[1]: Reloading finished in 216 ms.
Feb 08 11:40:19 managed-node1 podman[76955]: 2025-02-08 11:40:19.141960902 -0500 EST m=+0.026120492 volume remove envoy-proxy-config
Feb 08 11:40:19 managed-node1 podman[77093]: 2025-02-08 11:40:19.523798231 -0500 EST m=+0.025228441 volume remove envoy-certificates
Feb 08 11:40:19 managed-node1 podman[77231]: 2025-02-08 11:40:19.931196301 -0500 EST m=+0.058306038 volume remove wp-pv-claim
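The three volume remove events above correspond to the step the recap names "Remove volumes". One plausible form, using the containers.podman collection (the role may shell out to podman instead; the loop values are taken from the journal events above):

    - name: Remove volumes
      containers.podman.podman_volume:
        name: "{{ item }}"
        state: absent
      loop:
        - envoy-proxy-config
        - envoy-certificates
        - wp-pv-claim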
Feb 08 11:40:20 managed-node1 python3.12[77369]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:21 managed-node1 python3.12[77508]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:21 managed-node1 python3.12[77647]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:22 managed-node1 python3.12[77786]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:22 managed-node1 python3.12[77924]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:23 managed-node1 python3.12[78339]: ansible-service_facts Invoked
Feb 08 11:40:26 managed-node1 python3.12[78579]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:27 managed-node1 python3.12[78712]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:28 managed-node1 python3.12[78976]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:40:29 managed-node1 python3.12[79107]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:40:29 managed-node1 systemd[1]: Reload requested from client PID 79108 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:29 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:29 managed-node1 systemd-rc-local-generator[79151]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:29 managed-node1 systemd[1]: Reloading finished in 213 ms.
Feb 08 11:40:30 managed-node1 python3.12[79291]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:30 managed-node1 python3.12[79430]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:31 managed-node1 python3.12[79568]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:31 managed-node1 python3.12[79706]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:32 managed-node1 python3.12[79845]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:33 managed-node1 python3.12[80260]: ansible-service_facts Invoked
Feb 08 11:40:36 managed-node1 python3.12[80500]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:37 managed-node1 python3.12[80633]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
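This module invocation maps directly onto a playbook task; the recap names it "Stop and disable service". Reconstructed from the logged arguments, with defaulted parameters omitted (the role's actual task file may differ):

    - name: Stop and disable service
      ansible.builtin.systemd:
        name: quadlet-demo-mysql.service
        scope: system
        state: stopped
        enabled: false
        force: true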
Feb 08 11:40:37 managed-node1 systemd[1]: Reload requested from client PID 80636 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:37 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:37 managed-node1 systemd-rc-local-generator[80681]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:38 managed-node1 systemd[1]: Reloading finished in 213 ms.
Feb 08 11:40:38 managed-node1 systemd[1]: Stopping quadlet-demo-mysql.service...
░░ Subject: A stop job for unit quadlet-demo-mysql.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-mysql.service has begun execution.
░░
░░ The job identifier is 8113.
Feb 08 11:40:39 managed-node1 podman[80691]: 2025-02-08 11:40:39.974074885 -0500 EST m=+1.877175397 container died 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Feb 08 11:40:39 managed-node1 systemd[1]: 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer has successfully entered the 'dead' state.
Feb 08 11:40:39 managed-node1 systemd[1]: Stopped 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer - [systemd-run] /usr/bin/podman healthcheck run 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f.
░░ Subject: A stop job for unit 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-46ffaa87b097264a.timer has finished.
░░
░░ The job identifier is 8114 and the job result is done.
Feb 08 11:40:40 managed-node1 systemd[1]: run-p67241-i67541.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-p67241-i67541.scope has successfully entered the 'dead' state.
Feb 08 11:40:40 managed-node1 kernel: podman2: port 1(veth1) entered disabled state
Feb 08 11:40:40 managed-node1 kernel: veth1 (unregistering): left allmulticast mode
Feb 08 11:40:40 managed-node1 kernel: veth1 (unregistering): left promiscuous mode
Feb 08 11:40:40 managed-node1 kernel: podman2: port 1(veth1) entered disabled state
Feb 08 11:40:40 managed-node1 NetworkManager[723]: [1739032840.0350] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Feb 08 11:40:40 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 8116.
Feb 08 11:40:40 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 8116.
Feb 08 11:40:40 managed-node1 systemd[1]: run-netns-netns\x2d6645163b\x2db1d9\x2d20f0\x2d3422\x2d6cc367f5e9d9.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-netns-netns\x2d6645163b\x2db1d9\x2d20f0\x2d3422\x2d6cc367f5e9d9.mount has successfully entered the 'dead' state.
Feb 08 11:40:40 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-userdata-shm.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay\x2dcontainers-05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f-userdata-shm.mount has successfully entered the 'dead' state.
Feb 08 11:40:40 managed-node1 systemd[1]: var-lib-containers-storage-overlay-ac56aa345b7200285168a0642fef92270b7564fc6bf674bcca4f3cb46d5eb971-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay-ac56aa345b7200285168a0642fef92270b7564fc6bf674bcca4f3cb46d5eb971-merged.mount has successfully entered the 'dead' state.
Feb 08 11:40:40 managed-node1 podman[80691]: 2025-02-08 11:40:40.141334267 -0500 EST m=+2.044434719 container remove 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service)
Feb 08 11:40:40 managed-node1 quadlet-demo-mysql[80691]: 05159afa0f593fffd2274cad1533e16d9907a26d6e0621fcc4c448b5b2ab088f
Feb 08 11:40:40 managed-node1 systemd[1]: quadlet-demo-mysql.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-mysql.service has successfully entered the 'dead' state.
Feb 08 11:40:40 managed-node1 systemd[1]: Stopped quadlet-demo-mysql.service.
░░ Subject: A stop job for unit quadlet-demo-mysql.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-mysql.service has finished.
░░
░░ The job identifier is 8113 and the job result is done.
Feb 08 11:40:40 managed-node1 systemd[1]: quadlet-demo-mysql.service: Consumed 2.960s CPU time, 620.4M memory peak.
░░ Subject: Resources consumed by unit runtime
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-mysql.service completed and consumed the indicated resources.
Feb 08 11:40:40 managed-node1 python3.12[80869]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:41 managed-node1 python3.12[81133]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:40:42 managed-node1 python3.12[81264]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:40:42 managed-node1 systemd[1]: Reload requested from client PID 81265 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:42 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:42 managed-node1 systemd-rc-local-generator[81301]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:42 managed-node1 systemd[1]: Reloading finished in 219 ms.
Feb 08 11:40:43 managed-node1 python3.12[81587]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:43 managed-node1 podman[81588]: 2025-02-08 11:40:43.428850082 -0500 EST m=+0.243918608 image untag dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6
Feb 08 11:40:43 managed-node1 podman[81588]: 2025-02-08 11:40:43.20236734 -0500 EST m=+0.017435919 image remove dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
Feb 08 11:40:43 managed-node1 python3.12[81727]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:44 managed-node1 python3.12[81865]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:44 managed-node1 python3.12[82004]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:45 managed-node1 python3.12[82143]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:46 managed-node1 python3.12[82558]: ansible-service_facts Invoked
Feb 08 11:40:48 managed-node1 python3.12[82797]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:50 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Feb 08 11:40:50 managed-node1 python3.12[82930]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Feb 08 11:40:50 managed-node1 systemd[1]: Reload requested from client PID 82934 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:50 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:50 managed-node1 systemd-rc-local-generator[82980]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:50 managed-node1 systemd[1]: Reloading finished in 198 ms.
Feb 08 11:40:50 managed-node1 systemd[1]: quadlet-demo-mysql-volume.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-mysql-volume.service has successfully entered the 'dead' state.
Feb 08 11:40:50 managed-node1 systemd[1]: Stopped quadlet-demo-mysql-volume.service.
░░ Subject: A stop job for unit quadlet-demo-mysql-volume.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-mysql-volume.service has finished.
░░
░░ The job identifier is 8195 and the job result is done.
Feb 08 11:40:50 managed-node1 python3.12[83119]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:40:51 managed-node1 python3.12[83383]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 08 11:40:52 managed-node1 python3.12[83514]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:40:52 managed-node1 systemd[1]: Reload requested from client PID 83515 ('systemctl') (unit session-8.scope)...
Feb 08 11:40:52 managed-node1 systemd[1]: Reloading...
Feb 08 11:40:52 managed-node1 systemd-rc-local-generator[83561]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:40:52 managed-node1 systemd[1]: Reloading finished in 200 ms.
Feb 08 11:40:52 managed-node1 podman[83699]: 2025-02-08 11:40:52.911074694 -0500 EST m=+0.028934394 volume remove systemd-quadlet-demo-mysql
Feb 08 11:40:53 managed-node1 python3.12[83837]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:53 managed-node1 python3.12[83975]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:54 managed-node1 python3.12[84113]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:54 managed-node1 python3.12[84251]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:55 managed-node1 python3.12[84389]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:40:56 managed-node1 python3.12[84804]: ansible-service_facts Invoked
Feb 08 11:40:59 managed-node1 python3.12[85044]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:41:00 managed-node1 python3.12[85177]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Feb 08 11:41:00 managed-node1 systemd[1]: Reload requested from client PID 85180 ('systemctl') (unit session-8.scope)...
Feb 08 11:41:00 managed-node1 systemd[1]: Reloading...
Feb 08 11:41:00 managed-node1 systemd-rc-local-generator[85223]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:41:00 managed-node1 systemd[1]: Reloading finished in 199 ms.
Feb 08 11:41:00 managed-node1 systemd[1]: quadlet-demo-network.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit quadlet-demo-network.service has successfully entered the 'dead' state.
Feb 08 11:41:00 managed-node1 systemd[1]: Stopped quadlet-demo-network.service.
░░ Subject: A stop job for unit quadlet-demo-network.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit quadlet-demo-network.service has finished.
░░
░░ The job identifier is 8196 and the job result is done.
Feb 08 11:41:01 managed-node1 python3.12[85365]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 08 11:41:02 managed-node1 python3.12[85629]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
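quadlet-demo.network, removed above, is a Podman Quadlet unit: an INI-style file under /etc/containers/systemd/ that a systemd generator turns into the quadlet-demo-network.service stopped earlier. A hedged sketch of how such a file might be installed (the [Network] options are placeholders; the demo's real file contents are not reproduced in this log):

    - name: Install the quadlet network unit
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.network
        mode: "0644"
        content: |
          [Network]
          # Typical keys: NetworkName=, Subnet=, Gateway=, Label=
          Label=app=quadlet-demo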
Feb 08 11:41:02 managed-node1 python3.12[85760]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 08 11:41:02 managed-node1 systemd[1]: Reload requested from client PID 85761 ('systemctl') (unit session-8.scope)...
Feb 08 11:41:02 managed-node1 systemd[1]: Reloading...
Feb 08 11:41:02 managed-node1 systemd-rc-local-generator[85795]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 08 11:41:02 managed-node1 systemd[1]: Reloading finished in 206 ms.
Feb 08 11:41:03 managed-node1 python3.12[86083]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:41:04 managed-node1 python3.12[86221]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:41:05 managed-node1 python3.12[86359]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:41:05 managed-node1 python3.12[86498]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:41:05 managed-node1 python3.12[86637]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 08 11:41:07 managed-node1 python3.12[87052]: ansible-service_facts Invoked
Feb 08 11:41:09 managed-node1 python3.12[87292]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2
set -x
set -o pipefail
systemctl list-units --plain -l --all | grep quadlet || :
systemctl list-unit-files --all | grep quadlet || :
systemctl list-units --plain --failed -l --all | grep quadlet || :
_uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
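The multi-line _raw_params above is a single shell snippet run with _uses_shell=True: `exec 1>&2` routes the `set -x` trace to stderr, `set -o pipefail` lets a failure of the systemctl half surface as each pipeline's status, and the trailing `|| :` keeps the task from failing when grep finds nothing. The same diagnostic written as an explicit shell task (the task name is an assumption):

    - name: For testing and debugging - list quadlet units
      ansible.builtin.shell: |
        exec 1>&2
        set -x
        set -o pipefail
        systemctl list-units --plain -l --all | grep quadlet || :
        systemctl list-unit-files --all | grep quadlet || :
        systemctl list-units --plain --failed -l --all | grep quadlet || :
      changed_when: false  # diagnostic output only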
Feb 08 11:41:10 managed-node1 python3.12[87430]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
PLAY RECAP *********************************************************************
managed-node1 : ok=411 changed=44 unreachable=0 failed=1 skipped=443 rescued=1 ignored=0
TASKS RECAP ********************************************************************
Saturday 08 February 2025 11:41:10 -0500 (0:00:00.429) 0:02:45.244 *****
===============================================================================
fedora.linux_system_roles.podman : Ensure container images are present -- 15.91s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 6.26s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.95s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.28s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.17s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.15s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.08s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.06s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.06s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.41s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Gathering Facts --------------------------------------------------------- 1.41s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.26s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Check web --------------------------------------------------------------- 1.24s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.18s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.podman : Start service ------------------------ 1.17s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Prune images no longer in use -------- 1.13s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.12s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
fedora.linux_system_roles.certificate : Remove files -------------------- 1.12s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.11s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.08s
/tmp/collections-yxu/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6