ansible-playbook 2.9.27
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
executable location = /usr/local/bin/ansible-playbook
python version = 3.9.19 (main, May 16 2024, 11:40:09) [GCC 8.5.0 20210514 (Red Hat 8.5.0-22)]
No config file found; using defaults
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.
PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml
PLAY [all] *********************************************************************
META: ran handlers
TASK [Include vault variables] *************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Saturday 10 August 2024 12:46:36 -0400 (0:00:00.021) 0:00:00.021 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_test_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
},
"mysql_container_root_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
}
},
"ansible_included_var_files": [
"/tmp/podman-Y73/tests/vars/vault-variables.yml"
],
"changed": false
}
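For context, the task at tests_quadlet_demo.yml:5 loads the vault-encrypted test variables shown above. A minimal sketch of such a task, assuming an include_vars call against the reported file (the actual test source may differ):
    - name: Include vault variables
      include_vars:
        file: vars/vault-variables.yml  # reported as /tmp/podman-Y73/tests/vars/vault-variables.yml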
META: ran handlers
META: ran handlers
PLAY [Deploy the quadlet demo app] *********************************************
TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Saturday 10 August 2024 12:46:36 -0400 (0:00:00.019) 0:00:00.041 *******
ok: [managed_node1]
META: ran handlers
TASK [Generate certificates] ***************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:39
Saturday 10 August 2024 12:46:37 -0400 (0:00:00.934) 0:00:00.975 *******
TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Saturday 10 August 2024 12:46:37 -0400 (0:00:00.036) 0:00:01.011 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed_node1
TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Saturday 10 August 2024 12:46:37 -0400 (0:00:00.022) 0:00:01.033 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Saturday 10 August 2024 12:46:37 -0400 (0:00:00.039) 0:00:01.073 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Saturday 10 August 2024 12:46:37 -0400 (0:00:00.460) 0:00:01.533 *******
ok: [managed_node1] => {
"ansible_facts": {
"__certificate_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19
Saturday 10 August 2024 12:46:37 -0400 (0:00:00.062) 0:00:01.595 *******
skipping: [managed_node1] => (item=RedHat.yml) => {
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item=CentOS_8.yml) => {
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item=CentOS_8.yml) => {
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Saturday 10 August 2024 12:46:37 -0400 (0:00:00.058) 0:00:01.654 *******
ok: [managed_node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: python3-cryptography python3-dbus python3-pyasn1
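The "Nothing to do" result means the packages on the lsrpackages line above were already installed. A sketch of a package task consistent with that check, assuming the generic package module (the role's actual task may differ):
    - name: Ensure certificate role dependencies are installed
      package:
        name:
          - python3-cryptography
          - python3-dbus
          - python3-pyasn1
        state: present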
TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
Saturday 10 August 2024 12:46:41 -0400 (0:00:04.068) 0:00:05.723 *******
ok: [managed_node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: certmonger
TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35
Saturday 10 August 2024 12:46:45 -0400 (0:00:03.319) 0:00:09.043 *******
ok: [managed_node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//pre-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61
Saturday 10 August 2024 12:46:45 -0400 (0:00:00.502) 0:00:09.545 *******
ok: [managed_node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//post-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
Saturday 10 August 2024 12:46:46 -0400 (0:00:00.394) 0:00:09.939 *******
ok: [managed_node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"enabled": true,
"name": "certmonger",
"state": "started",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:42:23 EDT",
"ActiveEnterTimestampMonotonic": "8508433",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "syslog.target network.target dbus.socket systemd-journald.socket sysinit.target dbus.service system.slice basic.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:42:23 EDT",
"AssertTimestampMonotonic": "8414887",
"Before": "multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedorahosted.certmonger",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:42:23 EDT",
"ConditionTimestampMonotonic": "8414886",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/certmonger.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Certificate monitoring and PKI enrollment",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "671",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:42:23 EDT",
"ExecMainStartTimestampMonotonic": "8415766",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/certmonger.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "certmonger.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:42:23 EDT",
"InactiveExitTimestampMonotonic": "8415805",
"InvocationID": "0a3e0627a65540ce9e4c87e99d517c28",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "671",
"MemoryAccounting": "yes",
"MemoryCurrent": "8544256",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "certmonger.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PIDFile": "/run/certmonger.pid",
"PartOf": "dbus.service",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:42:23 EDT",
"StateChangeTimestampMonotonic": "8508433",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "1",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"WatchdogTimestamp": "Sat 2024-08-10 12:42:23 EDT",
"WatchdogTimestampMonotonic": "8508431",
"WatchdogUSec": "0"
}
}
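The result above (name: certmonger, state: started, enabled: true) corresponds to a service task along these lines; this is a sketch, not the role's literal source:
    - name: Ensure provider service is running
      service:
        name: certmonger
        state: started
        enabled: true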
TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101
Saturday 10 August 2024 12:46:46 -0400 (0:00:00.711) 0:00:10.651 *******
changed: [managed_node1] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
}
MSG:
Certificate requested (new).
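Based on the loop item shown above, the test's role invocation defines a single self-signed request for localhost; a sketch of that call (the real test may set additional role variables):
    - name: Generate certificates
      include_role:
        name: fedora.linux_system_roles.certificate
      vars:
        certificate_requests:
          - name: quadlet_demo
            dns:
              - localhost
            ca: self-sign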
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
Saturday 10 August 2024 12:46:47 -0400 (0:00:00.953) 0:00:11.605 *******
ok: [managed_node1] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUtyZVMzQjNEMEtXbmh6NUNhb3JhZzB3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJGaApaR1UwWWpjd0xUYzNNR1kwTWprMkxUbGxNV05tT1RBNUxXRmhNbUkyWVRCaU1CNFhEVEkwTURneE1ERTJORFkwCk4xb1hEVEkxTURneE1ERTJNekEwTWxvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFzTEliNHFMb1Q5QUhscDRuMzA0dm1LWUFGWDFJbXJJVwpxbFRCaVpoOVk4WUc4dytJaVZDeFdmbjhtb1pMeC9Pb2dHY2RVS3MvMGlORjNic24wWC96RzdLZnNRWjh1RDU0Cm9XR3FJcE5pYm1nSnE0N1kvNmZwMERKNlAyOHNOR2I2a2xWL2QvaXY4WU9tbjJPcGFIMnpyVVhJdG91cXVFY2YKUnVvZCtBMVBVQVlzaDRCNVU1NEh2NWsxMG1FU1FXdUZOZ2h3SEJheUtQMHErWTNxL2gyUlRZMlIyb3cwdTRwQgp5Qm5vU2dnbjRaeExkdnViVTMxa3dtWmNVL09TR1pYVXhFN2oxRW9yWmw1MU1GK0x3aVBxWVN2MTVUWmxzL2hqClhCOVZvNVJ0M2ZnYW4zYWsxazlEVHlEQU9pK0kzVTJPcG9SbzBVNjExSnlmR1crZ1R3SlJMUUlEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGT0VlCkx1bTljRTU0OXQ5ODdsb1Y0aWFzOHBjMU1COEdBMVVkSXdRWU1CYUFGQlVRL091M2JNQW9GU0xPSWY4ZW82cXkKUlZFN01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQ3haLzFGRXFXYlVlWXFVL1Mrcis4TlI2Y0Q1ZExnQnRJZApPTk9yRGVlR3dEMjgrQjcrUmpKU3VzbFpuN3ZIcWo0dGd3RVI4ZDgvcmpWMjZFNUx2WGJVeW5VSTJUL21NU2ZxCloycWdLY1pBVzhtbWlKV1VXQnlpZ1REQWdoSmlxeU1RVGt2NU0rVVcrcU1kWmRrdFNhNVNPNTNibzBxY09uSnQKeHR5U1ZiTFJVRGRQUFcxNjY5U0I1OER4K1NySm9Qb1Z2VS9IaDZ0RWJJTVJsRFhVc2hsQVA1M0Y5aTVyOEwzTQpGS1g2c2dtQklmNWIwZnFKSXIwbTd1YmFHWGU1WWpXSmoxVDU0aktYZjFSVnFkK0ZEcFZvaUJDTlhtc256ZHR1CnVlaU8zTURWb0hoemN6clVNbzJPMG1XUVQzMnNxTUFveEhkZTlPOERkTW85a2JqMjB5VWEKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"cert",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
ok: [managed_node1] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ3dzaHZpb3VoUDBBZVcKbmlmZlRpK1lwZ0FWZlVpYXNoYXFWTUdKbUgxanhnYnpENGlKVUxGWitmeWFoa3ZIODZpQVp4MVFxei9TSTBYZAp1eWZSZi9NYnNwK3hCbnk0UG5paFlhb2lrMkp1YUFtcmp0ai9wK25RTW5vL2J5dzBadnFTVlg5MytLL3hnNmFmClk2bG9mYk90UmNpMmk2cTRSeDlHNmgzNERVOVFCaXlIZ0hsVG5nZS9tVFhTWVJKQmE0VTJDSEFjRnJJby9TcjUKamVyK0haRk5qWkhhakRTN2lrSElHZWhLQ0NmaG5FdDIrNXRUZldUQ1pseFQ4NUlabGRURVR1UFVTaXRtWG5VdwpYNHZDSStwaEsvWGxObVd6K0dOY0gxV2psRzNkK0JxZmRxVFdUME5QSU1BNkw0amRUWTZtaEdqUlRyWFVuSjhaCmI2QlBBbEV0QWdNQkFBRUNnZ0VBWExOazI5ZGFDQjE5RGJFMSsvcmFEN3p2aUZhdFU3T2hIb1hPNy9CWHdKT3gKSzYzM2xRc0hzTXZ0a2VocE02TUt3U3JYLzIySTB5ZDhUU01XZGR3cDRGc3JXc1dyZzBJTEthZkNpZGRIS2VOZApSMnR2UDFtSjJwZlErUnpMVlVhOXFTNC83T0pFOUhGOFpsejBFUVQ1MGlzdGJWQURKaUR5TU5NOUc5bG54ZlJICmdMZUx2Z0R3bkkwdDFOalVIa1QwSEp2Ykl1REttWmQvMWpvYXJuZVMwTGljUG9BTkhFdkE0aGR4NlpOUklRRGoKdm94VGVZM3lCUDY3aTJiakpwQzdPbjFNNHloSU9uUDNSRFdVUTVGR0tDMjJRRmpJU0hpZG02U2xPRWFGbWlqMAo5UW15WW85Mk5jMytvY1ZqVXdjQlhURGxTTXR6NWxkNGc5OVlBQW5YeFFLQmdRRGt2S3ZPQVpmaHV4VUZXVmh5CmlPa0h4MzRnUWE2WUo3WkdwK1ZSeTJLNnAvcWFlL3BCNmZobzQ2TFFEVzR3QTE3WjR3aDNBYzJ2Zm51Q2JhMG0KUWhaRkJabWo5Zzk0ZVRMcGovMGorM004MHlrcnNHQ3dVRkd0Z2FWOVVwbjMrZVIyWklZeXljdHc4OGtSa1dkRAp5MW5Rc2tMdzY1OUZIMmJRakJSc3I1V2dod0tCZ1FERndZZm81QlArSFFPRkpwV1ZTQWVJRDFITGMzcmovQmdXCkJSNXVwbXlYVjdTWkdjR1liVng0QlFsNVZYZkVBQWNadlZpeEFPSmMvYzM1TFY2TVVzUjhoTmhEVjN4d3IzbVQKcHNnQThkY3FmcllYRk1Zb1ZzMEwveE9MVm1Tb080cnlyeFcxRUhScktONGZoVUp0bGUzbTI4UHlxVjBPd1lBQQpzZ0VXMThmeHF3S0JnUUNYR1ByMHpxQ2lBSTlXUlRya0RnVEJtc3A2VEdrbEliYUszZ1hpRGxSZHNQRWVjNTlSClExbFJFRzVRdVU1OGNQMkxPU295VXdLb1Z3a0hrNk81amhaZjZkOGdMRDd6K2p2amhiWld1cGZaS2pRY0pCcjYKM1ZQWEpibElHL1NSbG9XUW43NzJkYzdxQW1Ca05vVGNoOHdBeklEajNRSU5DejFEcmdVZjlxMEVxUUtCZ0I4ZwowNC9wTnV4dERMek0wZ2JwaTZYekkyRHZFSTBLaFpBUnhqU01wZE04endTNmRTclZMQlVJVmZGdmJxYS9pbDZXCjIzc2hOSUNHd3dGU0R1dnZxMVllWXh4V2pSUC95eGRleldTNHJMSFh1RG54M3k5Q1dveGszbUswZXF1d2s3S1gKdE1jSGpBdkNKeFMzR1RtK3UrLzI3K1VtbTZ0UzhoOHA1ZG4rcFU1NUFvR0FQeUF6RUo3Y3BGQS9xWGxQQU1BZgplQ3RvNVZDeTlIaUtidnQ3Y2dXQnJ5VVRwdWJpUDZBNG1TdzJiRmR3UUFka3QvUTlWdUFDRXZXSUlISVdkSlFjClZRNkpkaldkUVVZVTB2TXlrUmJTVWxpbDFVOXpOUnRLSDgxN0d6SDVpSFFWem93a0N5Z3FpRmN1ZG5LdHVMZS8KTDB3OUI0VklKSGIxWk1ZQlFmbkRWejQ9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K",
"encoding": "base64",
"item": [
"key",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/private/quadlet_demo.key"
}
ok: [managed_node1] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUtyZVMzQjNEMEtXbmh6NUNhb3JhZzB3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSTJGaApaR1UwWWpjd0xUYzNNR1kwTWprMkxUbGxNV05tT1RBNUxXRmhNbUkyWVRCaU1CNFhEVEkwTURneE1ERTJORFkwCk4xb1hEVEkxTURneE1ERTJNekEwTWxvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFzTEliNHFMb1Q5QUhscDRuMzA0dm1LWUFGWDFJbXJJVwpxbFRCaVpoOVk4WUc4dytJaVZDeFdmbjhtb1pMeC9Pb2dHY2RVS3MvMGlORjNic24wWC96RzdLZnNRWjh1RDU0Cm9XR3FJcE5pYm1nSnE0N1kvNmZwMERKNlAyOHNOR2I2a2xWL2QvaXY4WU9tbjJPcGFIMnpyVVhJdG91cXVFY2YKUnVvZCtBMVBVQVlzaDRCNVU1NEh2NWsxMG1FU1FXdUZOZ2h3SEJheUtQMHErWTNxL2gyUlRZMlIyb3cwdTRwQgp5Qm5vU2dnbjRaeExkdnViVTMxa3dtWmNVL09TR1pYVXhFN2oxRW9yWmw1MU1GK0x3aVBxWVN2MTVUWmxzL2hqClhCOVZvNVJ0M2ZnYW4zYWsxazlEVHlEQU9pK0kzVTJPcG9SbzBVNjExSnlmR1crZ1R3SlJMUUlEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGT0VlCkx1bTljRTU0OXQ5ODdsb1Y0aWFzOHBjMU1COEdBMVVkSXdRWU1CYUFGQlVRL091M2JNQW9GU0xPSWY4ZW82cXkKUlZFN01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQ3haLzFGRXFXYlVlWXFVL1Mrcis4TlI2Y0Q1ZExnQnRJZApPTk9yRGVlR3dEMjgrQjcrUmpKU3VzbFpuN3ZIcWo0dGd3RVI4ZDgvcmpWMjZFNUx2WGJVeW5VSTJUL21NU2ZxCloycWdLY1pBVzhtbWlKV1VXQnlpZ1REQWdoSmlxeU1RVGt2NU0rVVcrcU1kWmRrdFNhNVNPNTNibzBxY09uSnQKeHR5U1ZiTFJVRGRQUFcxNjY5U0I1OER4K1NySm9Qb1Z2VS9IaDZ0RWJJTVJsRFhVc2hsQVA1M0Y5aTVyOEwzTQpGS1g2c2dtQklmNWIwZnFKSXIwbTd1YmFHWGU1WWpXSmoxVDU0aktYZjFSVnFkK0ZEcFZvaUJDTlhtc256ZHR1CnVlaU8zTURWb0hoemN6clVNbzJPMG1XUVQzMnNxTUFveEhkZTlPOERkTW85a2JqMjB5VWEKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"ca",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
TASK [fedora.linux_system_roles.certificate : Create return data] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160
Saturday 10 August 2024 12:46:48 -0400 (0:00:01.153) 0:00:12.759 *******
ok: [managed_node1] => {
"ansible_facts": {
"certificate_test_certs": {
"quadlet_demo": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAKreS3B3D0KWnhz5Caorag0wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fh\nZGU0YjcwLTc3MGY0Mjk2LTllMWNmOTA5LWFhMmI2YTBiMB4XDTI0MDgxMDE2NDY0\nN1oXDTI1MDgxMDE2MzA0MlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsLIb4qLoT9AHlp4n304vmKYAFX1ImrIW\nqlTBiZh9Y8YG8w+IiVCxWfn8moZLx/OogGcdUKs/0iNF3bsn0X/zG7KfsQZ8uD54\noWGqIpNibmgJq47Y/6fp0DJ6P28sNGb6klV/d/iv8YOmn2OpaH2zrUXItouquEcf\nRuod+A1PUAYsh4B5U54Hv5k10mESQWuFNghwHBayKP0q+Y3q/h2RTY2R2ow0u4pB\nyBnoSggn4ZxLdvubU31kwmZcU/OSGZXUxE7j1EorZl51MF+LwiPqYSv15TZls/hj\nXB9Vo5Rt3fgan3ak1k9DTyDAOi+I3U2OpoRo0U611JyfGW+gTwJRLQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFOEe\nLum9cE549t987loV4ias8pc1MB8GA1UdIwQYMBaAFBUQ/Ou3bMAoFSLOIf8eo6qy\nRVE7MA0GCSqGSIb3DQEBCwUAA4IBAQCxZ/1FEqWbUeYqU/S+r+8NR6cD5dLgBtId\nONOrDeeGwD28+B7+RjJSuslZn7vHqj4tgwER8d8/rjV26E5LvXbUynUI2T/mMSfq\nZ2qgKcZAW8mmiJWUWByigTDAghJiqyMQTkv5M+UW+qMdZdktSa5SO53bo0qcOnJt\nxtySVbLRUDdPPW1669SB58Dx+SrJoPoVvU/Hh6tEbIMRlDXUshlAP53F9i5r8L3M\nFKX6sgmBIf5b0fqJIr0m7ubaGXe5YjWJj1T54jKXf1RVqd+FDpVoiBCNXmsnzdtu\nueiO3MDVoHhzczrUMo2O0mWQT32sqMAoxHde9O8DdMo9kbj20yUa\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAKreS3B3D0KWnhz5Caorag0wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fh\nZGU0YjcwLTc3MGY0Mjk2LTllMWNmOTA5LWFhMmI2YTBiMB4XDTI0MDgxMDE2NDY0\nN1oXDTI1MDgxMDE2MzA0MlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsLIb4qLoT9AHlp4n304vmKYAFX1ImrIW\nqlTBiZh9Y8YG8w+IiVCxWfn8moZLx/OogGcdUKs/0iNF3bsn0X/zG7KfsQZ8uD54\noWGqIpNibmgJq47Y/6fp0DJ6P28sNGb6klV/d/iv8YOmn2OpaH2zrUXItouquEcf\nRuod+A1PUAYsh4B5U54Hv5k10mESQWuFNghwHBayKP0q+Y3q/h2RTY2R2ow0u4pB\nyBnoSggn4ZxLdvubU31kwmZcU/OSGZXUxE7j1EorZl51MF+LwiPqYSv15TZls/hj\nXB9Vo5Rt3fgan3ak1k9DTyDAOi+I3U2OpoRo0U611JyfGW+gTwJRLQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFOEe\nLum9cE549t987loV4ias8pc1MB8GA1UdIwQYMBaAFBUQ/Ou3bMAoFSLOIf8eo6qy\nRVE7MA0GCSqGSIb3DQEBCwUAA4IBAQCxZ/1FEqWbUeYqU/S+r+8NR6cD5dLgBtId\nONOrDeeGwD28+B7+RjJSuslZn7vHqj4tgwER8d8/rjV26E5LvXbUynUI2T/mMSfq\nZ2qgKcZAW8mmiJWUWByigTDAghJiqyMQTkv5M+UW+qMdZdktSa5SO53bo0qcOnJt\nxtySVbLRUDdPPW1669SB58Dx+SrJoPoVvU/Hh6tEbIMRlDXUshlAP53F9i5r8L3M\nFKX6sgmBIf5b0fqJIr0m7ubaGXe5YjWJj1T54jKXf1RVqd+FDpVoiBCNXmsnzdtu\nueiO3MDVoHhzczrUMo2O0mWQT32sqMAoxHde9O8DdMo9kbj20yUa\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCwshviouhP0AeW\nniffTi+YpgAVfUiashaqVMGJmH1jxgbzD4iJULFZ+fyahkvH86iAZx1Qqz/SI0Xd\nuyfRf/Mbsp+xBny4PnihYaoik2JuaAmrjtj/p+nQMno/byw0ZvqSVX93+K/xg6af\nY6lofbOtRci2i6q4Rx9G6h34DU9QBiyHgHlTnge/mTXSYRJBa4U2CHAcFrIo/Sr5\njer+HZFNjZHajDS7ikHIGehKCCfhnEt2+5tTfWTCZlxT85IZldTETuPUSitmXnUw\nX4vCI+phK/XlNmWz+GNcH1WjlG3d+BqfdqTWT0NPIMA6L4jdTY6mhGjRTrXUnJ8Z\nb6BPAlEtAgMBAAECggEAXLNk29daCB19DbE1+/raD7zviFatU7OhHoXO7/BXwJOx\nK633lQsHsMvtkehpM6MKwSrX/22I0yd8TSMWddwp4FsrWsWrg0ILKafCiddHKeNd\nR2tvP1mJ2pfQ+RzLVUa9qS4/7OJE9HF8Zlz0EQT50istbVADJiDyMNM9G9lnxfRH\ngLeLvgDwnI0t1NjUHkT0HJvbIuDKmZd/1joarneS0LicPoANHEvA4hdx6ZNRIQDj\nvoxTeY3yBP67i2bjJpC7On1M4yhIOnP3RDWUQ5FGKC22QFjISHidm6SlOEaFmij0\n9QmyYo92Nc3+ocVjUwcBXTDlSMtz5ld4g99YAAnXxQKBgQDkvKvOAZfhuxUFWVhy\niOkHx34gQa6YJ7ZGp+VRy2K6p/qae/pB6fho46LQDW4wA17Z4wh3Ac2vfnuCba0m\nQhZFBZmj9g94eTLpj/0j+3M80ykrsGCwUFGtgaV9Upn3+eR2ZIYyyctw88kRkWdD\ny1nQskLw659FH2bQjBRsr5WghwKBgQDFwYfo5BP+HQOFJpWVSAeID1HLc3rj/BgW\nBR5upmyXV7SZGcGYbVx4BQl5VXfEAAcZvVixAOJc/c35LV6MUsR8hNhDV3xwr3mT\npsgA8dcqfrYXFMYoVs0L/xOLVmSoO4ryrxW1EHRrKN4fhUJtle3m28PyqV0OwYAA\nsgEW18fxqwKBgQCXGPr0zqCiAI9WRTrkDgTBmsp6TGklIbaK3gXiDlRdsPEec59R\nQ1lREG5QuU58cP2LOSoyUwKoVwkHk6O5jhZf6d8gLD7z+jvjhbZWupfZKjQcJBr6\n3VPXJblIG/SRloWQn772dc7qAmBkNoTch8wAzIDj3QINCz1DrgUf9q0EqQKBgB8g\n04/pNuxtDLzM0gbpi6XzI2DvEI0KhZARxjSMpdM8zwS6dSrVLBUIVfFvbqa/il6W\n23shNICGwwFSDuvvq1YeYxxWjRP/yxdezWS4rLHXuDnx3y9CWoxk3mK0equwk7KX\ntMcHjAvCJxS3GTm+u+/27+Umm6tS8h8p5dn+pU55AoGAPyAzEJ7cpFA/qXlPAMAf\neCto5VCy9HiKbvt7cgWBryUTpubiP6A4mSw2bFdwQAdkt/Q9VuACEvWIIHIWdJQc\nVQ6JdjWdQUYU0vMykRbSUlil1U9zNRtKH817GzH5iHQVzowkCygqiFcudnKtuLe/\nL0w9B4VIJHb1ZMYBQfnDVz4=\n-----END PRIVATE KEY-----\n"
}
}
},
"changed": false
}
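The slurp results are base64-encoded, and "Create return data" decodes them into the *_content values shown above. A sketch of that pattern for the cert file, using a hypothetical __cert_slurp register (the role actually loops over cert, key, and ca in one task):
    - name: Slurp the contents of the files
      slurp:
        src: /etc/pki/tls/certs/quadlet_demo.crt
      register: __cert_slurp
    - name: Create return data
      set_fact:
        cert_content: "{{ __cert_slurp.content | b64decode }}"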
TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176
Saturday 10 August 2024 12:46:48 -0400 (0:00:00.045) 0:00:12.804 *******
ok: [managed_node1] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAKreS3B3D0KWnhz5Caorag0wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fh\nZGU0YjcwLTc3MGY0Mjk2LTllMWNmOTA5LWFhMmI2YTBiMB4XDTI0MDgxMDE2NDY0\nN1oXDTI1MDgxMDE2MzA0MlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsLIb4qLoT9AHlp4n304vmKYAFX1ImrIW\nqlTBiZh9Y8YG8w+IiVCxWfn8moZLx/OogGcdUKs/0iNF3bsn0X/zG7KfsQZ8uD54\noWGqIpNibmgJq47Y/6fp0DJ6P28sNGb6klV/d/iv8YOmn2OpaH2zrUXItouquEcf\nRuod+A1PUAYsh4B5U54Hv5k10mESQWuFNghwHBayKP0q+Y3q/h2RTY2R2ow0u4pB\nyBnoSggn4ZxLdvubU31kwmZcU/OSGZXUxE7j1EorZl51MF+LwiPqYSv15TZls/hj\nXB9Vo5Rt3fgan3ak1k9DTyDAOi+I3U2OpoRo0U611JyfGW+gTwJRLQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFOEe\nLum9cE549t987loV4ias8pc1MB8GA1UdIwQYMBaAFBUQ/Ou3bMAoFSLOIf8eo6qy\nRVE7MA0GCSqGSIb3DQEBCwUAA4IBAQCxZ/1FEqWbUeYqU/S+r+8NR6cD5dLgBtId\nONOrDeeGwD28+B7+RjJSuslZn7vHqj4tgwER8d8/rjV26E5LvXbUynUI2T/mMSfq\nZ2qgKcZAW8mmiJWUWByigTDAghJiqyMQTkv5M+UW+qMdZdktSa5SO53bo0qcOnJt\nxtySVbLRUDdPPW1669SB58Dx+SrJoPoVvU/Hh6tEbIMRlDXUshlAP53F9i5r8L3M\nFKX6sgmBIf5b0fqJIr0m7ubaGXe5YjWJj1T54jKXf1RVqd+FDpVoiBCNXmsnzdtu\nueiO3MDVoHhzczrUMo2O0mWQT32sqMAoxHde9O8DdMo9kbj20yUa\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCwshviouhP0AeW\nniffTi+YpgAVfUiashaqVMGJmH1jxgbzD4iJULFZ+fyahkvH86iAZx1Qqz/SI0Xd\nuyfRf/Mbsp+xBny4PnihYaoik2JuaAmrjtj/p+nQMno/byw0ZvqSVX93+K/xg6af\nY6lofbOtRci2i6q4Rx9G6h34DU9QBiyHgHlTnge/mTXSYRJBa4U2CHAcFrIo/Sr5\njer+HZFNjZHajDS7ikHIGehKCCfhnEt2+5tTfWTCZlxT85IZldTETuPUSitmXnUw\nX4vCI+phK/XlNmWz+GNcH1WjlG3d+BqfdqTWT0NPIMA6L4jdTY6mhGjRTrXUnJ8Z\nb6BPAlEtAgMBAAECggEAXLNk29daCB19DbE1+/raD7zviFatU7OhHoXO7/BXwJOx\nK633lQsHsMvtkehpM6MKwSrX/22I0yd8TSMWddwp4FsrWsWrg0ILKafCiddHKeNd\nR2tvP1mJ2pfQ+RzLVUa9qS4/7OJE9HF8Zlz0EQT50istbVADJiDyMNM9G9lnxfRH\ngLeLvgDwnI0t1NjUHkT0HJvbIuDKmZd/1joarneS0LicPoANHEvA4hdx6ZNRIQDj\nvoxTeY3yBP67i2bjJpC7On1M4yhIOnP3RDWUQ5FGKC22QFjISHidm6SlOEaFmij0\n9QmyYo92Nc3+ocVjUwcBXTDlSMtz5ld4g99YAAnXxQKBgQDkvKvOAZfhuxUFWVhy\niOkHx34gQa6YJ7ZGp+VRy2K6p/qae/pB6fho46LQDW4wA17Z4wh3Ac2vfnuCba0m\nQhZFBZmj9g94eTLpj/0j+3M80ykrsGCwUFGtgaV9Upn3+eR2ZIYyyctw88kRkWdD\ny1nQskLw659FH2bQjBRsr5WghwKBgQDFwYfo5BP+HQOFJpWVSAeID1HLc3rj/BgW\nBR5upmyXV7SZGcGYbVx4BQl5VXfEAAcZvVixAOJc/c35LV6MUsR8hNhDV3xwr3mT\npsgA8dcqfrYXFMYoVs0L/xOLVmSoO4ryrxW1EHRrKN4fhUJtle3m28PyqV0OwYAA\nsgEW18fxqwKBgQCXGPr0zqCiAI9WRTrkDgTBmsp6TGklIbaK3gXiDlRdsPEec59R\nQ1lREG5QuU58cP2LOSoyUwKoVwkHk6O5jhZf6d8gLD7z+jvjhbZWupfZKjQcJBr6\n3VPXJblIG/SRloWQn772dc7qAmBkNoTch8wAzIDj3QINCz1DrgUf9q0EqQKBgB8g\n04/pNuxtDLzM0gbpi6XzI2DvEI0KhZARxjSMpdM8zwS6dSrVLBUIVfFvbqa/il6W\n23shNICGwwFSDuvvq1YeYxxWjRP/yxdezWS4rLHXuDnx3y9CWoxk3mK0equwk7KX\ntMcHjAvCJxS3GTm+u+/27+Umm6tS8h8p5dn+pU55AoGAPyAzEJ7cpFA/qXlPAMAf\neCto5VCy9HiKbvt7cgWBryUTpubiP6A4mSw2bFdwQAdkt/Q9VuACEvWIIHIWdJQc\nVQ6JdjWdQUYU0vMykRbSUlil1U9zNRtKH817GzH5iHQVzowkCygqiFcudnKtuLe/\nL0w9B4VIJHb1ZMYBQfnDVz4=\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN 
CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAKreS3B3D0KWnhz5Caorag0wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fh\nZGU0YjcwLTc3MGY0Mjk2LTllMWNmOTA5LWFhMmI2YTBiMB4XDTI0MDgxMDE2NDY0\nN1oXDTI1MDgxMDE2MzA0MlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsLIb4qLoT9AHlp4n304vmKYAFX1ImrIW\nqlTBiZh9Y8YG8w+IiVCxWfn8moZLx/OogGcdUKs/0iNF3bsn0X/zG7KfsQZ8uD54\noWGqIpNibmgJq47Y/6fp0DJ6P28sNGb6klV/d/iv8YOmn2OpaH2zrUXItouquEcf\nRuod+A1PUAYsh4B5U54Hv5k10mESQWuFNghwHBayKP0q+Y3q/h2RTY2R2ow0u4pB\nyBnoSggn4ZxLdvubU31kwmZcU/OSGZXUxE7j1EorZl51MF+LwiPqYSv15TZls/hj\nXB9Vo5Rt3fgan3ak1k9DTyDAOi+I3U2OpoRo0U611JyfGW+gTwJRLQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFOEe\nLum9cE549t987loV4ias8pc1MB8GA1UdIwQYMBaAFBUQ/Ou3bMAoFSLOIf8eo6qy\nRVE7MA0GCSqGSIb3DQEBCwUAA4IBAQCxZ/1FEqWbUeYqU/S+r+8NR6cD5dLgBtId\nONOrDeeGwD28+B7+RjJSuslZn7vHqj4tgwER8d8/rjV26E5LvXbUynUI2T/mMSfq\nZ2qgKcZAW8mmiJWUWByigTDAghJiqyMQTkv5M+UW+qMdZdktSa5SO53bo0qcOnJt\nxtySVbLRUDdPPW1669SB58Dx+SrJoPoVvU/Hh6tEbIMRlDXUshlAP53F9i5r8L3M\nFKX6sgmBIf5b0fqJIr0m7ubaGXe5YjWJj1T54jKXf1RVqd+FDpVoiBCNXmsnzdtu\nueiO3MDVoHhzczrUMo2O0mWQT32sqMAoxHde9O8DdMo9kbj20yUa\n-----END CERTIFICATE-----\n'}) => {
"ansible_loop_var": "item",
"changed": false,
"cmd": [
"getcert",
"stop-tracking",
"-f",
"/etc/pki/tls/certs/quadlet_demo.crt"
],
"delta": "0:00:00.035304",
"end": "2024-08-10 12:46:49.376970",
"item": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAKreS3B3D0KWnhz5Caorag0wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fh\nZGU0YjcwLTc3MGY0Mjk2LTllMWNmOTA5LWFhMmI2YTBiMB4XDTI0MDgxMDE2NDY0\nN1oXDTI1MDgxMDE2MzA0MlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsLIb4qLoT9AHlp4n304vmKYAFX1ImrIW\nqlTBiZh9Y8YG8w+IiVCxWfn8moZLx/OogGcdUKs/0iNF3bsn0X/zG7KfsQZ8uD54\noWGqIpNibmgJq47Y/6fp0DJ6P28sNGb6klV/d/iv8YOmn2OpaH2zrUXItouquEcf\nRuod+A1PUAYsh4B5U54Hv5k10mESQWuFNghwHBayKP0q+Y3q/h2RTY2R2ow0u4pB\nyBnoSggn4ZxLdvubU31kwmZcU/OSGZXUxE7j1EorZl51MF+LwiPqYSv15TZls/hj\nXB9Vo5Rt3fgan3ak1k9DTyDAOi+I3U2OpoRo0U611JyfGW+gTwJRLQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFOEe\nLum9cE549t987loV4ias8pc1MB8GA1UdIwQYMBaAFBUQ/Ou3bMAoFSLOIf8eo6qy\nRVE7MA0GCSqGSIb3DQEBCwUAA4IBAQCxZ/1FEqWbUeYqU/S+r+8NR6cD5dLgBtId\nONOrDeeGwD28+B7+RjJSuslZn7vHqj4tgwER8d8/rjV26E5LvXbUynUI2T/mMSfq\nZ2qgKcZAW8mmiJWUWByigTDAghJiqyMQTkv5M+UW+qMdZdktSa5SO53bo0qcOnJt\nxtySVbLRUDdPPW1669SB58Dx+SrJoPoVvU/Hh6tEbIMRlDXUshlAP53F9i5r8L3M\nFKX6sgmBIf5b0fqJIr0m7ubaGXe5YjWJj1T54jKXf1RVqd+FDpVoiBCNXmsnzdtu\nueiO3MDVoHhzczrUMo2O0mWQT32sqMAoxHde9O8DdMo9kbj20yUa\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAKreS3B3D0KWnhz5Caorag0wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMI2Fh\nZGU0YjcwLTc3MGY0Mjk2LTllMWNmOTA5LWFhMmI2YTBiMB4XDTI0MDgxMDE2NDY0\nN1oXDTI1MDgxMDE2MzA0MlowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsLIb4qLoT9AHlp4n304vmKYAFX1ImrIW\nqlTBiZh9Y8YG8w+IiVCxWfn8moZLx/OogGcdUKs/0iNF3bsn0X/zG7KfsQZ8uD54\noWGqIpNibmgJq47Y/6fp0DJ6P28sNGb6klV/d/iv8YOmn2OpaH2zrUXItouquEcf\nRuod+A1PUAYsh4B5U54Hv5k10mESQWuFNghwHBayKP0q+Y3q/h2RTY2R2ow0u4pB\nyBnoSggn4ZxLdvubU31kwmZcU/OSGZXUxE7j1EorZl51MF+LwiPqYSv15TZls/hj\nXB9Vo5Rt3fgan3ak1k9DTyDAOi+I3U2OpoRo0U611JyfGW+gTwJRLQIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFOEe\nLum9cE549t987loV4ias8pc1MB8GA1UdIwQYMBaAFBUQ/Ou3bMAoFSLOIf8eo6qy\nRVE7MA0GCSqGSIb3DQEBCwUAA4IBAQCxZ/1FEqWbUeYqU/S+r+8NR6cD5dLgBtId\nONOrDeeGwD28+B7+RjJSuslZn7vHqj4tgwER8d8/rjV26E5LvXbUynUI2T/mMSfq\nZ2qgKcZAW8mmiJWUWByigTDAghJiqyMQTkv5M+UW+qMdZdktSa5SO53bo0qcOnJt\nxtySVbLRUDdPPW1669SB58Dx+SrJoPoVvU/Hh6tEbIMRlDXUshlAP53F9i5r8L3M\nFKX6sgmBIf5b0fqJIr0m7ubaGXe5YjWJj1T54jKXf1RVqd+FDpVoiBCNXmsnzdtu\nueiO3MDVoHhzczrUMo2O0mWQT32sqMAoxHde9O8DdMo9kbj20yUa\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCwshviouhP0AeW\nniffTi+YpgAVfUiashaqVMGJmH1jxgbzD4iJULFZ+fyahkvH86iAZx1Qqz/SI0Xd\nuyfRf/Mbsp+xBny4PnihYaoik2JuaAmrjtj/p+nQMno/byw0ZvqSVX93+K/xg6af\nY6lofbOtRci2i6q4Rx9G6h34DU9QBiyHgHlTnge/mTXSYRJBa4U2CHAcFrIo/Sr5\njer+HZFNjZHajDS7ikHIGehKCCfhnEt2+5tTfWTCZlxT85IZldTETuPUSitmXnUw\nX4vCI+phK/XlNmWz+GNcH1WjlG3d+BqfdqTWT0NPIMA6L4jdTY6mhGjRTrXUnJ8Z\nb6BPAlEtAgMBAAECggEAXLNk29daCB19DbE1+/raD7zviFatU7OhHoXO7/BXwJOx\nK633lQsHsMvtkehpM6MKwSrX/22I0yd8TSMWddwp4FsrWsWrg0ILKafCiddHKeNd\nR2tvP1mJ2pfQ+RzLVUa9qS4/7OJE9HF8Zlz0EQT50istbVADJiDyMNM9G9lnxfRH\ngLeLvgDwnI0t1NjUHkT0HJvbIuDKmZd/1joarneS0LicPoANHEvA4hdx6ZNRIQDj\nvoxTeY3yBP67i2bjJpC7On1M4yhIOnP3RDWUQ5FGKC22QFjISHidm6SlOEaFmij0\n9QmyYo92Nc3+ocVjUwcBXTDlSMtz5ld4g99YAAnXxQKBgQDkvKvOAZfhuxUFWVhy\niOkHx34gQa6YJ7ZGp+VRy2K6p/qae/pB6fho46LQDW4wA17Z4wh3Ac2vfnuCba0m\nQhZFBZmj9g94eTLpj/0j+3M80ykrsGCwUFGtgaV9Upn3+eR2ZIYyyctw88kRkWdD\ny1nQskLw659FH2bQjBRsr5WghwKBgQDFwYfo5BP+HQOFJpWVSAeID1HLc3rj/BgW\nBR5upmyXV7SZGcGYbVx4BQl5VXfEAAcZvVixAOJc/c35LV6MUsR8hNhDV3xwr3mT\npsgA8dcqfrYXFMYoVs0L/xOLVmSoO4ryrxW1EHRrKN4fhUJtle3m28PyqV0OwYAA\nsgEW18fxqwKBgQCXGPr0zqCiAI9WRTrkDgTBmsp6TGklIbaK3gXiDlRdsPEec59R\nQ1lREG5QuU58cP2LOSoyUwKoVwkHk6O5jhZf6d8gLD7z+jvjhbZWupfZKjQcJBr6\n3VPXJblIG/SRloWQn772dc7qAmBkNoTch8wAzIDj3QINCz1DrgUf9q0EqQKBgB8g\n04/pNuxtDLzM0gbpi6XzI2DvEI0KhZARxjSMpdM8zwS6dSrVLBUIVfFvbqa/il6W\n23shNICGwwFSDuvvq1YeYxxWjRP/yxdezWS4rLHXuDnx3y9CWoxk3mK0equwk7KX\ntMcHjAvCJxS3GTm+u+/27+Umm6tS8h8p5dn+pU55AoGAPyAzEJ7cpFA/qXlPAMAf\neCto5VCy9HiKbvt7cgWBryUTpubiP6A4mSw2bFdwQAdkt/Q9VuACEvWIIHIWdJQc\nVQ6JdjWdQUYU0vMykRbSUlil1U9zNRtKH817GzH5iHQVzowkCygqiFcudnKtuLe/\nL0w9B4VIJHb1ZMYBQfnDVz4=\n-----END PRIVATE KEY-----\n"
},
"rc": 0,
"start": "2024-08-10 12:46:49.341666"
}
STDOUT:
Request "20240810164647" removed.
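The cmd array in the result above is equivalent to running getcert stop-tracking against the tracked certificate file; expressed as a task sketch:
    - name: Stop tracking certificates
      command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt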
TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
Saturday 10 August 2024 12:46:49 -0400 (0:00:00.508) 0:00:13.313 *******
changed: [managed_node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
changed: [managed_node1] => (item=/etc/pki/tls/private/quadlet_demo.key) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/private/quadlet_demo.key",
"path": "/etc/pki/tls/private/quadlet_demo.key",
"state": "absent"
}
ok: [managed_node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": false,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
TASK [Run the role] ************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:50
Saturday 10 August 2024 12:46:50 -0400 (0:00:01.066) 0:00:14.379 *******
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 10 August 2024 12:46:50 -0400 (0:00:00.082) 0:00:14.461 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 10 August 2024 12:46:50 -0400 (0:00:00.027) 0:00:14.489 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 10 August 2024 12:46:50 -0400 (0:00:00.040) 0:00:14.529 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 10 August 2024 12:46:51 -0400 (0:00:00.367) 0:00:14.897 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:20
Saturday 10 August 2024 12:46:51 -0400 (0:00:00.041) 0:00:14.938 *******
ok: [managed_node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed_node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed_node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
ok: [managed_node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
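Based only on the facts reported above, roles/podman/vars/CentOS_8.yml resolves __podman_packages to something like:
    __podman_packages:
      - crun
      - podman
      - podman-plugins
      - shadow-utils-subid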
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 10 August 2024 12:46:51 -0400 (0:00:00.070) 0:00:15.009 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 10 August 2024 12:46:52 -0400 (0:00:01.739) 0:00:16.748 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 10 August 2024 12:46:52 -0400 (0:00:00.053) 0:00:16.801 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:22
Saturday 10 August 2024 12:46:52 -0400 (0:00:00.059) 0:00:16.861 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.030714",
"end": "2024-08-10 12:46:53.304283",
"rc": 0,
"start": "2024-08-10 12:46:53.273569"
}
STDOUT:
podman version 4.9.4-dev
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 10 August 2024 12:46:53 -0400 (0:00:00.390) 0:00:17.251 *******
ok: [managed_node1] => {
"ansible_facts": {
"podman_version": "4.9.4-dev"
},
"changed": false
}
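The two tasks above capture the output of podman --version and strip the prefix to leave the bare version string. A sketch of that flow with a hypothetical register name (the role's actual parsing may differ):
    - name: Get podman version
      command: podman --version
      register: __podman_version_output
    - name: Set podman version
      set_fact:
        podman_version: "{{ __podman_version_output.stdout | regex_replace('^podman version ', '') }}"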
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:32
Saturday 10 August 2024 12:46:53 -0400 (0:00:00.053) 0:00:17.305 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:39
Saturday 10 August 2024 12:46:53 -0400 (0:00:00.055) 0:00:17.360 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
META: end_host conditional evaluated to false, continuing execution for managed_node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 10 August 2024 12:46:53 -0400 (0:00:00.191) 0:00:17.551 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:46:53 -0400 (0:00:00.065) 0:00:17.617 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_passwd": {
"root": [
"x",
"0",
"0",
"root",
"/root",
"/bin/bash"
]
}
},
"changed": false
}
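The getent_passwd fact above maps the user name to the remaining passwd fields (password placeholder, UID, GID, GECOS, home, shell). A sketch of a getent lookup that yields this structure for the root user seen here:
    - name: Get user information
      getent:
        database: passwd
        key: root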
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:46:54 -0400 (0:00:00.485) 0:00:18.102 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:46:54 -0400 (0:00:00.038) 0:00:18.141 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:46:54 -0400 (0:00:00.065) 0:00:18.206 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:46:54 -0400 (0:00:00.377) 0:00:18.583 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:46:54 -0400 (0:00:00.041) 0:00:18.624 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.373) 0:00:18.998 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.030 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.031) 0:00:19.062 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.094 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.127 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.031) 0:00:19.158 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.190 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.030) 0:00:19.221 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:62
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.031) 0:00:19.253 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:71
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.067) 0:00:19.320 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.061) 0:00:19.382 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.414 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:74
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.058) 0:00:19.473 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.063) 0:00:19.537 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.569 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:77
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.602 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.063) 0:00:19.665 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.031) 0:00:19.697 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.031) 0:00:19.729 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.064) 0:00:19.794 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.826 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 10 August 2024 12:46:55 -0400 (0:00:00.032) 0:00:19.858 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 10 August 2024 12:46:56 -0400 (0:00:00.031) 0:00:19.889 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:86
Saturday 10 August 2024 12:46:56 -0400 (0:00:00.031) 0:00:19.921 *******
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Saturday 10 August 2024 12:46:56 -0400 (0:00:00.134) 0:00:20.056 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed_node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 10 August 2024 12:46:56 -0400 (0:00:00.059) 0:00:20.116 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 10 August 2024 12:46:56 -0400 (0:00:00.041) 0:00:20.157 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 10 August 2024 12:46:56 -0400 (0:00:00.363) 0:00:20.521 *******
ok: [managed_node1] => {
"ansible_facts": {
"__firewall_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Saturday 10 August 2024 12:46:56 -0400 (0:00:00.037) 0:00:20.558 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Saturday 10 August 2024 12:46:57 -0400 (0:00:00.362) 0:00:20.921 *******
ok: [managed_node1] => {
"ansible_facts": {
"__firewall_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Saturday 10 August 2024 12:46:57 -0400 (0:00:00.037) 0:00:20.958 *******
ok: [managed_node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: firewalld
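Note: firewalld was already installed, so the package task reports nothing to do. A minimal sketch of an equivalent task, assuming the standard package module (illustrative only, not the role's exact source):

- name: Install firewalld
  ansible.builtin.package:
    name: firewalld   # package listed above as lsrpackages
    state: present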
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Saturday 10 August 2024 12:47:00 -0400 (0:00:03.287) 0:00:24.246 *******
skipping: [managed_node1] => {}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Saturday 10 August 2024 12:47:00 -0400 (0:00:00.033) 0:00:24.280 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Saturday 10 August 2024 12:47:00 -0400 (0:00:00.032) 0:00:24.312 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Saturday 10 August 2024 12:47:00 -0400 (0:00:00.032) 0:00:24.345 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Saturday 10 August 2024 12:47:00 -0400 (0:00:00.032) 0:00:24.378 *******
skipping: [managed_node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"item": "ufw",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Saturday 10 August 2024 12:47:00 -0400 (0:00:00.043) 0:00:24.422 *******
ok: [managed_node1] => {
"changed": false,
"name": "firewalld",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"ActiveEnterTimestampMonotonic": "6667579",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "dbus.service polkit.service basic.target dbus.socket system.slice sysinit.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"AssertTimestampMonotonic": "5273975",
"Before": "network-pre.target shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ConditionTimestampMonotonic": "5273973",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ipset.service nftables.service ebtables.service iptables.service shutdown.target ip6tables.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "582",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ExecMainStartTimestampMonotonic": "5276069",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"InactiveExitTimestampMonotonic": "5276133",
"InvocationID": "cdc8f49a1be047faa5ce1c8fd76ca6b9",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "582",
"MemoryAccounting": "yes",
"MemoryCurrent": "49852416",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target dbus.socket system.slice",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"StateChangeTimestampMonotonic": "6667579",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"WatchdogTimestampMonotonic": "6667576",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Saturday 10 August 2024 12:47:01 -0400 (0:00:00.512) 0:00:24.934 *******
ok: [managed_node1] => {
"changed": false,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"ActiveEnterTimestampMonotonic": "6667579",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "dbus.service polkit.service basic.target dbus.socket system.slice sysinit.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"AssertTimestampMonotonic": "5273975",
"Before": "network-pre.target shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ConditionTimestampMonotonic": "5273973",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ipset.service nftables.service ebtables.service iptables.service shutdown.target ip6tables.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "582",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ExecMainStartTimestampMonotonic": "5276069",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"InactiveExitTimestampMonotonic": "5276133",
"InvocationID": "cdc8f49a1be047faa5ce1c8fd76ca6b9",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "582",
"MemoryAccounting": "yes",
"MemoryCurrent": "49852416",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target dbus.socket system.slice",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"StateChangeTimestampMonotonic": "6667579",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"WatchdogTimestampMonotonic": "6667576",
"WatchdogUSec": "0"
}
}
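Note: both service tasks above are no-ops here, since firewalld is already unmasked, enabled, and active on the managed node. A minimal sketch of what they amount to, assuming the ansible.builtin.systemd module (illustrative only):

- name: Unmask firewalld service
  ansible.builtin.systemd:
    name: firewalld
    masked: false

- name: Enable and start firewalld service
  ansible.builtin.systemd:
    name: firewalld
    enabled: true
    state: started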
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Saturday 10 August 2024 12:47:01 -0400 (0:00:00.507) 0:00:25.441 *******
ok: [managed_node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/libexec/platform-python",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Saturday 10 August 2024 12:47:01 -0400 (0:00:00.041) 0:00:25.483 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Saturday 10 August 2024 12:47:01 -0400 (0:00:00.061) 0:00:25.544 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Saturday 10 August 2024 12:47:01 -0400 (0:00:00.033) 0:00:25.577 *******
changed: [managed_node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
changed: [managed_node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
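Note: both loop items report changed, so ports 8000/tcp and 9000/tcp are now open in firewalld. A minimal sketch of the port list driving this loop; the variable name podman_firewall is an assumption, since the test playbook's variables are not shown in this log:

podman_firewall:
  - port: 8000/tcp
    state: enabled
  - port: 9000/tcp
    state: enabled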
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Saturday 10 August 2024 12:47:03 -0400 (0:00:01.343) 0:00:26.921 *******
skipping: [managed_node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.055) 0:00:26.976 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.036) 0:00:27.013 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.033) 0:00:27.046 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.033) 0:00:27.080 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.031) 0:00:27.112 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.032) 0:00:27.144 *******
skipping: [managed_node1] => {}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:93
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.033) 0:00:27.177 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:100
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.031) 0:00:27.208 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:104
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.031) 0:00:27.240 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:113
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.029) 0:00:27.270 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:122
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.056) 0:00:27.326 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.143) 0:00:27.469 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.034) 0:00:27.504 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.042) 0:00:27.546 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.051) 0:00:27.598 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.032) 0:00:27.631 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.031) 0:00:27.662 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.032) 0:00:27.694 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Saturday 10 August 2024 12:47:03 -0400 (0:00:00.031) 0:00:27.726 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.656) 0:00:28.383 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.034) 0:00:28.417 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.041) 0:00:28.459 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.079) 0:00:28.538 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.032) 0:00:28.571 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.032) 0:00:28.603 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.032) 0:00:28.636 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Saturday 10 August 2024 12:47:04 -0400 (0:00:00.031) 0:00:28.668 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.528) 0:00:29.197 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.035) 0:00:29.232 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.041) 0:00:29.274 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.052) 0:00:29.326 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.032) 0:00:29.358 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.032) 0:00:29.390 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.031) 0:00:29.422 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Saturday 10 August 2024 12:47:05 -0400 (0:00:00.032) 0:00:29.454 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
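Note: each of the three "Manage each secret" tasks reports changed, so three podman secrets were created on the host; their names and values are censored because no_log is set. For orientation only, a hedged sketch of how secrets are typically declared for this role; every name and value below is a placeholder, not what this run used:

podman_secrets:
  - name: example-secret                   # placeholder name
    data: "{{ example_vaulted_value }}"    # placeholder vault-encrypted value
    state: present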
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.524) 0:00:29.979 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:136
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.059) 0:00:30.038 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.148) 0:00:30.187 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
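Note: decoded, the __podman_quadlet_str value above is the exact content of the quadlet-demo.network unit the role is about to install:

[Network]
Subnet=192.168.30.0/24
Gateway=192.168.30.1
Label=app=wordpress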
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.045) 0:00:30.232 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.040) 0:00:30.273 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.034) 0:00:30.307 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.047) 0:00:30.355 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.060) 0:00:30.415 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.039) 0:00:30.455 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.038) 0:00:30.494 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:47:06 -0400 (0:00:00.044) 0:00:30.538 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.377) 0:00:30.916 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.042) 0:00:30.958 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.393) 0:00:31.351 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.034) 0:00:31.385 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.032) 0:00:31.417 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.033) 0:00:31.450 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.032) 0:00:31.483 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.033) 0:00:31.516 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.033) 0:00:31.550 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.031) 0:00:31.581 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.032) 0:00:31.614 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.059) 0:00:31.674 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.036) 0:00:31.710 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.034) 0:00:31.745 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.079) 0:00:31.824 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:07 -0400 (0:00:00.038) 0:00:31.863 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.031) 0:00:31.894 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.071) 0:00:31.966 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.086) 0:00:32.053 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.033) 0:00:32.086 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.032) 0:00:32.119 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.033) 0:00:32.153 *******
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.031) 0:00:32.184 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.032) 0:00:32.217 *******
ok: [managed_node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:08 -0400 (0:00:00.377) 0:00:32.594 *******
changed: [managed_node1] => {
"changed": true,
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"dest": "/etc/containers/systemd/quadlet-demo.network",
"gid": 0,
"group": "root",
"md5sum": "061f3cf318cbd8ab5794bb1173831fb8",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 74,
"src": "/root/.ansible/tmp/ansible-tmp-1723308428.7586992-28296-197568005891150/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 10 August 2024 12:47:09 -0400 (0:00:00.810) 0:00:33.405 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 10 August 2024 12:47:09 -0400 (0:00:00.033) 0:00:33.438 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 10 August 2024 12:47:09 -0400 (0:00:00.033) 0:00:33.472 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
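Note: the null name and empty status indicate a plain daemon reload rather than a unit operation; reloading systemd lets the quadlet generator turn /etc/containers/systemd/quadlet-demo.network into the generated unit /run/systemd/generator/quadlet-demo-network.service that is started next. A minimal sketch of an equivalent task, assuming the ansible.builtin.systemd module (illustrative only):

- name: Reload systemctl
  ansible.builtin.systemd:
    daemon_reload: true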
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:10 -0400 (0:00:00.612) 0:00:34.085 *******
changed: [managed_node1] => {
"changed": true,
"name": "quadlet-demo-network.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "sysinit.target -.mount systemd-journald.socket system.slice basic.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet=192.168.30.0/24 --gateway=192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "-.mount system.slice sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "infinity",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
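Editor's note: the generated ExecStart line above (podman network create --ignore --subnet=192.168.30.0/24 --gateway=192.168.30.1 --label app=wordpress systemd-quadlet-demo) is what quadlet produces from the 74-byte quadlet-demo.network file copied earlier in this run. The file itself is not echoed in the log; a likely reconstruction, assuming standard quadlet [Network] keys, is:

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress

This is a sketch inferred from the command-line arguments, not captured verbatim from the test output.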
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 10 August 2024 12:47:10 -0400 (0:00:00.584) 0:00:34.670 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:47:10 -0400 (0:00:00.035) 0:00:34.705 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:47:10 -0400 (0:00:00.044) 0:00:34.750 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:47:10 -0400 (0:00:00.041) 0:00:34.792 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:10 -0400 (0:00:00.033) 0:00:34.825 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:47:10 -0400 (0:00:00.048) 0:00:34.873 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:47:11 -0400 (0:00:00.062) 0:00:34.936 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:47:11 -0400 (0:00:00.040) 0:00:34.976 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:47:11 -0400 (0:00:00.038) 0:00:35.015 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:47:11 -0400 (0:00:00.046) 0:00:35.061 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:47:11 -0400 (0:00:00.382) 0:00:35.444 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:47:11 -0400 (0:00:00.041) 0:00:35.486 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:47:11 -0400 (0:00:00.374) 0:00:35.861 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.063) 0:00:35.924 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.034) 0:00:35.958 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.033) 0:00:35.992 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.034) 0:00:36.026 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.034) 0:00:36.060 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.034) 0:00:36.094 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.033) 0:00:36.128 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.034) 0:00:36.162 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.060) 0:00:36.223 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.034) 0:00:36.257 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.034) 0:00:36.291 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.080) 0:00:36.371 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.038) 0:00:36.410 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.032) 0:00:36.443 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.070) 0:00:36.513 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.055) 0:00:36.568 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.065) 0:00:36.634 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.032) 0:00:36.666 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.032) 0:00:36.699 *******
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.031) 0:00:36.731 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 10 August 2024 12:47:12 -0400 (0:00:00.030) 0:00:36.761 *******
ok: [managed_node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 34,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:13 -0400 (0:00:00.382) 0:00:37.144 *******
changed: [managed_node1] => {
"changed": true,
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"gid": 0,
"group": "root",
"md5sum": "5ddd03a022aeb4502d9bc8ce436b4233",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 9,
"src": "/root/.ansible/tmp/ansible-tmp-1723308433.3081214-28423-261850214882015/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 10 August 2024 12:47:13 -0400 (0:00:00.693) 0:00:37.837 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 10 August 2024 12:47:13 -0400 (0:00:00.033) 0:00:37.871 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 10 August 2024 12:47:14 -0400 (0:00:00.033) 0:00:37.905 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:14 -0400 (0:00:00.621) 0:00:38.527 *******
changed: [managed_node1] => {
"changed": true,
"name": "quadlet-demo-mysql-volume.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "sysinit.target -.mount systemd-journald.socket system.slice basic.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "-.mount system.slice sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "infinity",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.603) 0:00:39.131 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.036) 0:00:39.167 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
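Editor's note: for readability, the __podman_quadlet_str fact above, with its escaped newlines expanded, is the quadlet-demo-mysql.container unit that the role renders from quadlet-demo-mysql.container.j2 (reproduced verbatim from the fact value; nothing added):

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill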
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.101) 0:00:39.269 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.041) 0:00:39.310 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.036) 0:00:39.347 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.047) 0:00:39.395 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.062) 0:00:39.458 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.039) 0:00:39.497 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.039) 0:00:39.537 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:47:15 -0400 (0:00:00.043) 0:00:39.581 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.378) 0:00:39.959 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.041) 0:00:40.000 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.371) 0:00:40.372 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.031) 0:00:40.403 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.064) 0:00:40.468 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.032) 0:00:40.500 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.033) 0:00:40.533 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.033) 0:00:40.567 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.032) 0:00:40.599 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.033) 0:00:40.632 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.031) 0:00:40.664 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.060) 0:00:40.725 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.035) 0:00:40.760 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.033) 0:00:40.794 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:47:16 -0400 (0:00:00.080) 0:00:40.874 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.038) 0:00:40.912 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.032) 0:00:40.945 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.070) 0:00:41.015 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.055) 0:00:41.071 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.031) 0:00:41.103 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.064) 0:00:41.168 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.032) 0:00:41.200 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => (item=/tmp/quadlet_demo) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/quadlet_demo",
"mode": "0777",
"owner": "root",
"path": "/tmp/quadlet_demo",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 10 August 2024 12:47:17 -0400 (0:00:00.391) 0:00:41.592 *******
changed: [managed_node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 10 August 2024 12:47:25 -0400 (0:00:08.027) 0:00:49.619 *******
ok: [managed_node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 67,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:26 -0400 (0:00:00.378) 0:00:49.997 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 10 August 2024 12:47:26 -0400 (0:00:00.031) 0:00:50.029 *******
changed: [managed_node1] => {
"changed": true,
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.container",
"gid": 0,
"group": "root",
"md5sum": "341b473056d2a5dfa35970b0d2e23a5d",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 363,
"src": "/root/.ansible/tmp/ansible-tmp-1723308446.193933-28568-131028171043796/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 10 August 2024 12:47:26 -0400 (0:00:00.684) 0:00:50.714 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 10 August 2024 12:47:26 -0400 (0:00:00.031) 0:00:50.746 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:27 -0400 (0:00:00.612) 0:00:51.359 *******
changed: [managed_node1] => {
"changed": true,
"name": "quadlet-demo-mysql.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "systemd-journald.socket quadlet-demo-network.service sysinit.target system.slice basic.target quadlet-demo-mysql-volume.service tmp.mount -.mount",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network=systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target -.mount quadlet-demo-mysql-volume.service system.slice quadlet-demo-network.service",
"RequiresMountsFor": "/tmp/quadlet_demo /run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.986) 0:00:52.345 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.035) 0:00:52.380 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
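Note: the escaped __podman_quadlet_str above is the envoy-proxy-configmap.yml ConfigMap wrapping an envoy.yaml. Re-indented and abridged from that string (the log flattens the original indentation, so the layout below is reconstructed and elided details are folded into comments), it amounts to:

    ---
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: envoy-proxy-config
    data:
      envoy.yaml: |
        # envoy admin interface on 0.0.0.0:9901
        admin:
          address:
            socket_address: {address: 0.0.0.0, port_value: 9901}
        static_resources:
          listeners:
          # TLS listener on 0.0.0.0:8080
          - name: listener_0
            address:
              socket_address: {address: 0.0.0.0, port_value: 8080}
            filter_chains:
            - filters:
              - name: envoy.filters.network.http_connection_manager
                # route_config (abridged): all domains ["*"], prefix "/" -> cluster "backend"
              transport_socket:
                name: envoy.transport_sockets.tls
                # DownstreamTlsContext (abridged): serves /etc/envoy-certificates/certificate.pem and certificate.key
          clusters:
          # static backend at 127.0.0.1:80, presumably the wordpress container sharing the pod network
          - name: backend
            connect_timeout: 5s
            type: STATIC
            lb_policy: ROUND_ROBIN
            load_assignment:
              cluster_name: backend
              endpoints:
              - lb_endpoints:
                - endpoint:
                    address:
                      socket_address: {address: 127.0.0.1, port_value: 80}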
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.044) 0:00:52.425 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.041) 0:00:52.467 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.035) 0:00:52.502 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.046) 0:00:52.549 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.061) 0:00:52.611 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.040) 0:00:52.651 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.040) 0:00:52.691 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:47:28 -0400 (0:00:00.044) 0:00:52.735 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.387) 0:00:53.123 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.041) 0:00:53.165 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.407) 0:00:53.573 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.032) 0:00:53.605 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.032) 0:00:53.638 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.032) 0:00:53.670 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.033) 0:00:53.704 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.033) 0:00:53.738 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.032) 0:00:53.770 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.032) 0:00:53.803 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:47:29 -0400 (0:00:00.032) 0:00:53.836 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.061) 0:00:53.897 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.035) 0:00:53.932 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.033) 0:00:53.966 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.080) 0:00:54.046 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.038) 0:00:54.085 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.066) 0:00:54.152 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.072) 0:00:54.225 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.056) 0:00:54.281 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.033) 0:00:54.315 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.032) 0:00:54.347 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.032) 0:00:54.380 *******
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.031) 0:00:54.411 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.030) 0:00:54.442 *******
ok: [managed_node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 103,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:30 -0400 (0:00:00.410) 0:00:54.852 *******
changed: [managed_node1] => {
"changed": true,
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"dest": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"gid": 0,
"group": "root",
"md5sum": "aec75d972c231aac004e1338934544cf",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 2102,
"src": "/root/.ansible/tmp/ansible-tmp-1723308451.0169137-28693-186169960018362/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 10 August 2024 12:47:31 -0400 (0:00:00.760) 0:00:55.613 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 10 August 2024 12:47:31 -0400 (0:00:00.034) 0:00:55.648 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 10 August 2024 12:47:31 -0400 (0:00:00.032) 0:00:55.680 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.631) 0:00:56.312 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.037) 0:00:56.350 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.039) 0:00:56.389 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
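Note: the quadlet-demo.yml content above (templated from quadlet-demo.yml.j2) is likewise easier to follow unescaped. Abridged and re-indented from the string above (per-container resource requests/limits folded into a comment), it declares a 20Gi PersistentVolumeClaim and a two-container pod:

    ---
    apiVersion: v1
    kind: PersistentVolumeClaim
    metadata:
      name: wp-pv-claim
      labels:
        app: wordpress
    spec:
      accessModes: [ReadWriteOnce]
      resources:
        requests:
          storage: 20Gi
    ---
    apiVersion: v1
    kind: Pod
    metadata:
      name: quadlet-demo
    spec:
      containers:
      - name: wordpress
        image: quay.io/linux-system-roles/wordpress:4.8-apache
        env:
        - name: WORDPRESS_DB_HOST
          value: quadlet-demo-mysql
        - name: WORDPRESS_DB_PASSWORD
          valueFrom:
            secretKeyRef: {name: mysql-root-password-kube, key: password}
        volumeMounts:
        - {name: wordpress-persistent-storage, mountPath: /var/www/html}
        # resources (both containers): requests 64Mi/250m, limits 128Mi/500m
      - name: envoy
        image: quay.io/linux-system-roles/envoyproxy:v1.25.0
        env:
        - {name: ENVOY_UID, value: "0"}
        volumeMounts:
        - {name: config-volume, mountPath: /etc/envoy}
        - {name: certificates, mountPath: /etc/envoy-certificates}
      volumes:
      - name: config-volume
        configMap: {name: envoy-proxy-config}
      - name: certificates
        secret: {secretName: envoy-certificates}
      - name: wordpress-persistent-storage
        persistentVolumeClaim: {claimName: wp-pv-claim}
      - name: www       # not used - for testing hostpath
        hostPath: {path: /tmp/httpd3}
      - name: create    # not used - for testing hostpath
        hostPath: {path: /tmp/httpd3-create}

The two hostPath volumes are the directories the role creates under /tmp in the "Create host directories" task later in this run.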
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.097) 0:00:56.487 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.041) 0:00:56.528 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.035) 0:00:56.564 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.050) 0:00:56.614 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.061) 0:00:56.676 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.040) 0:00:56.716 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.039) 0:00:56.756 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:47:32 -0400 (0:00:00.044) 0:00:56.800 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.381) 0:00:57.181 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.041) 0:00:57.223 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.476) 0:00:57.699 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.032) 0:00:57.732 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.032) 0:00:57.765 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.032) 0:00:57.797 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.033) 0:00:57.831 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:47:33 -0400 (0:00:00.033) 0:00:57.865 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.031) 0:00:57.896 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.032) 0:00:57.929 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.031) 0:00:57.961 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.060) 0:00:58.021 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.034) 0:00:58.056 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.032) 0:00:58.089 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.079) 0:00:58.169 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.037) 0:00:58.207 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.032) 0:00:58.239 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.070) 0:00:58.309 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.087) 0:00:58.397 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.032) 0:00:58.430 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.033) 0:00:58.463 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.032) 0:00:58.496 *******
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.031) 0:00:58.528 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 10 August 2024 12:47:34 -0400 (0:00:00.031) 0:00:58.560 *******
ok: [managed_node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 136,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:35 -0400 (0:00:00.551) 0:00:59.111 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 10 August 2024 12:47:35 -0400 (0:00:00.032) 0:00:59.144 *******
changed: [managed_node1] => {
"changed": true,
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"dest": "/etc/containers/systemd/quadlet-demo.yml",
"gid": 0,
"group": "root",
"md5sum": "fd890594adfc24339cb9cdc5e7b19a66",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 1605,
"src": "/root/.ansible/tmp/ansible-tmp-1723308455.3186655-28815-246988955991842/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 10 August 2024 12:47:36 -0400 (0:00:01.022) 0:01:00.166 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 10 August 2024 12:47:36 -0400 (0:00:00.032) 0:01:00.199 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:36 -0400 (0:00:00.669) 0:01:00.869 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.036) 0:01:00.905 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.039) 0:01:00.945 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
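Note: unescaped, the quadlet-demo.kube file above is short enough to reproduce in full from the string; it wires the pod to the MySQL service and the quadlet network and publishes the two envoy ports:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml

These [Kube] keys are what the quadlet generator turns into the podman kube play arguments (--network, --configmap, --publish) visible in the generated quadlet-demo.service ExecStart later in this run.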
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.045) 0:01:00.991 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.041) 0:01:01.033 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.034) 0:01:01.067 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.046) 0:01:01.114 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.061) 0:01:01.176 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.040) 0:01:01.216 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.039) 0:01:01.256 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.044) 0:01:01.300 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.383) 0:01:01.683 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:47:37 -0400 (0:00:00.041) 0:01:01.725 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.402) 0:01:02.127 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.032) 0:01:02.160 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.033) 0:01:02.193 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.032) 0:01:02.225 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.032) 0:01:02.258 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.031) 0:01:02.290 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.033) 0:01:02.324 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.033) 0:01:02.357 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.032) 0:01:02.389 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.059) 0:01:02.449 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:47:38 -0400 (0:00:00.034) 0:01:02.484 *******
ok: [managed_node1] => {
"changed": false,
"content": "LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK",
"encoding": "base64",
"source": "/etc/containers/systemd/quadlet-demo.yml"
}
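Note: the content/encoding/source triple returned here appears to be a base64 read-back of /etc/containers/systemd/quadlet-demo.yml; it decodes to the same PVC-plus-Pod YAML written earlier in the run, and the role evidently uses it to derive the image list and hostPath volumes that appear in the next task's facts.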
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.430) 0:01:02.914 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/wordpress:4.8-apache",
"quay.io/linux-system-roles/envoyproxy:v1.25.0"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": [
"/tmp/httpd3",
"/tmp/httpd3-create"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.141) 0:01:03.056 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.038) 0:01:03.094 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.032) 0:01:03.126 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.070) 0:01:03.197 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.087) 0:01:03.285 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.033) 0:01:03.318 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.032) 0:01:03.351 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 10 August 2024 12:47:39 -0400 (0:00:00.031) 0:01:03.383 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => (item=/tmp/httpd3) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
changed: [managed_node1] => (item=/tmp/httpd3-create) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3-create",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3-create",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 10 August 2024 12:47:40 -0400 (0:00:00.750) 0:01:04.133 *******
changed: [managed_node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed_node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 10 August 2024 12:47:58 -0400 (0:00:17.877) 0:01:22.011 *******
ok: [managed_node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 160,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 10 August 2024 12:47:58 -0400 (0:00:00.385) 0:01:22.396 *******
changed: [managed_node1] => {
"changed": true,
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"dest": "/etc/containers/systemd/quadlet-demo.kube",
"gid": 0,
"group": "root",
"md5sum": "da53c88f92b68b0487aa209f795b6bb3",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 456,
"src": "/root/.ansible/tmp/ansible-tmp-1723308478.5610893-28968-259954300491430/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 10 August 2024 12:47:59 -0400 (0:00:00.764) 0:01:23.161 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 10 August 2024 12:47:59 -0400 (0:00:00.034) 0:01:23.196 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 10 August 2024 12:47:59 -0400 (0:00:00.034) 0:01:23.230 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 10 August 2024 12:47:59 -0400 (0:00:00.620) 0:01:23.850 *******
changed: [managed_node1] => {
"changed": true,
"name": "quadlet-demo.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice -.mount systemd-journald.socket quadlet-demo-network.service sysinit.target quadlet-demo-mysql.service basic.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network=systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice quadlet-demo-network.service sysinit.target quadlet-demo-mysql.service -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
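Note: the status dump above is the snapshot taken before the start (ActiveState=inactive, MainPID=0); the unit itself is not hand-written but produced by systemd's quadlet generator from the SourcePath shown (/etc/containers/systemd/quadlet-demo.kube) into /run/systemd/generator/quadlet-demo.service. A rough way to inspect that translation by hand, assuming a stock podman 4.4+ install (the quadlet binary path and the -dryrun flag may differ by distro), would be:
    # show the generated unit and the quadlet source it was built from
    systemctl cat quadlet-demo.service
    cat /etc/containers/systemd/quadlet-demo.kube
    # ask the generator to print the units it would emit, without installing them
    /usr/libexec/podman/quadlet -dryrun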
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Saturday 10 August 2024 12:48:01 -0400 (0:00:01.900) 0:01:25.751 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:143
Saturday 10 August 2024 12:48:01 -0400 (0:00:00.066) 0:01:25.818 *******
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:149
Saturday 10 August 2024 12:48:01 -0400 (0:00:00.031) 0:01:25.849 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:158
Saturday 10 August 2024 12:48:01 -0400 (0:00:00.031) 0:01:25.881 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [Check quadlet files] *****************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:84
Saturday 10 August 2024 12:48:02 -0400 (0:00:00.031) 0:01:25.913 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/containers/systemd"
],
"delta": "0:00:00.005629",
"end": "2024-08-10 12:48:02.411447",
"rc": 0,
"start": "2024-08-10 12:48:02.405818"
}
STDOUT:
total 24
drwxr-xr-x. 9 root root 187 Aug 10 12:40 ../
-rw-r--r--. 1 root root 74 Aug 10 12:47 quadlet-demo.network
-rw-r--r--. 1 root root 9 Aug 10 12:47 quadlet-demo-mysql.volume
-rw-r--r--. 1 root root 363 Aug 10 12:47 quadlet-demo-mysql.container
-rw-r--r--. 1 root root 2102 Aug 10 12:47 envoy-proxy-configmap.yml
-rw-r--r--. 1 root root 1605 Aug 10 12:47 quadlet-demo.yml
-rw-r--r--. 1 root root 456 Aug 10 12:47 quadlet-demo.kube
drwxr-xr-x. 2 root root 185 Aug 10 12:47 ./
TASK [Check containers] ********************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:88
Saturday 10 August 2024 12:48:02 -0400 (0:00:00.454) 0:01:26.367 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.049834",
"end": "2024-08-10 12:48:02.900805",
"failed_when_result": false,
"rc": 0,
"start": "2024-08-10 12:48:02.850971"
}
STDOUT:
CONTAINER ID  IMAGE                                            COMMAND               CREATED         STATUS                   PORTS                                           NAMES
77ecf6ddf9f0  quay.io/linux-system-roles/mysql:5.6             mysqld                34 seconds ago  Up 34 seconds (healthy)                                                  quadlet-demo-mysql
50025198fd7a  localhost/podman-pause:4.9.4-dev-1708535009                            2 seconds ago   Up 1 second                                                              a96f3a51b8d1-service
98ced3336e84  localhost/podman-pause:4.9.4-dev-1708535009                            2 seconds ago   Up 1 second              0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  1ed91f1b3155-infra
2c53e6e47092  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  2 seconds ago   Up 1 second              0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-wordpress
37c04bc75a0e  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  1 second ago    Up 1 second              0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-envoy
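For orientation only (not part of the test), a narrower manual spot-check of the same state could filter on the demo containers and exercise the mysql health check directly:
    # list just the quadlet-demo* containers with their status
    podman ps --filter name=quadlet-demo --format '{{.Names}}  {{.Status}}'
    # exit status 0 means the mysql container's health check passes
    podman healthcheck run quadlet-demo-mysql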
TASK [Check volumes] ***********************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:93
Saturday 10 August 2024 12:48:02 -0400 (0:00:00.493) 0:01:26.861 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls"
],
"delta": "0:00:00.034856",
"end": "2024-08-10 12:48:03.422151",
"failed_when_result": false,
"rc": 0,
"start": "2024-08-10 12:48:03.387295"
}
STDOUT:
DRIVER      VOLUME NAME
local       systemd-quadlet-demo-mysql
local       wp-pv-claim
local       envoy-proxy-config
local       envoy-certificates
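If the contents of the mysql volume ever need to be examined while debugging, podman can report its backing directory; a minimal sketch:
    # print the host directory backing the named volume
    podman volume inspect systemd-quadlet-demo-mysql --format '{{ .Mountpoint }}'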
TASK [Check pods] **************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:98
Saturday 10 August 2024 12:48:03 -0400 (0:00:00.510) 0:01:27.372 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.045644",
"end": "2024-08-10 12:48:03.859279",
"failed_when_result": false,
"rc": 0,
"start": "2024-08-10 12:48:03.813635"
}
STDOUT:
POD ID        NAME          STATUS   CREATED        INFRA ID      IDS                                     NAMES                                                         STATUS
1ed91f1b3155  quadlet-demo  Running  3 seconds ago  98ced3336e84  98ced3336e84,2c53e6e47092,37c04bc75a0e  1ed91f1b3155-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:103
Saturday 10 August 2024 12:48:03 -0400 (0:00:00.439) 0:01:27.811 *******
ok: [managed_node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units | grep quadlet",
"delta": "0:00:00.012094",
"end": "2024-08-10 12:48:04.255888",
"failed_when_result": false,
"rc": 0,
"start": "2024-08-10 12:48:04.243794"
}
STDOUT:
quadlet-demo-mysql-volume.service  loaded active exited   quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service         loaded active running  quadlet-demo-mysql.service
quadlet-demo-network.service       loaded active exited   quadlet-demo-network.service
quadlet-demo.service               loaded active running  quadlet-demo.service
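The ordering of these units follows from the After=/Requires= values recorded in the quadlet-demo.service properties earlier. A hand check of that wiring, purely for orientation, might be:
    # show the dependency and state fields systemd resolved for the kube unit
    systemctl show -p Requires,After,SubState quadlet-demo.service
    systemctl list-dependencies quadlet-demo.service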
TASK [Check web] ***************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:109
Saturday 10 August 2024 12:48:04 -0400 (0:00:00.393) 0:01:28.204 *******
changed: [managed_node1] => {
"attempts": 1,
"changed": true,
"checksum_dest": null,
"checksum_src": "9dbc8c154daaed74c52aa781ab9994c0246a012f",
"dest": "/run/out",
"elapsed": 0,
"gid": 0,
"group": "root",
"md5sum": "f59bccb13786ba6aa549360a06d6b1b4",
"mode": "0600",
"owner": "root",
"secontext": "system_u:object_r:var_run_t:s0",
"size": 11666,
"src": "/root/.ansible/tmp/ansible-tmp-1723308484.3624122-29059-203662362658850/tmpqwd0mt0k",
"state": "file",
"status_code": 200,
"uid": 0,
"url": "https://localhost:8000"
}
MSG:
OK (unknown bytes)
TASK [Show web] ****************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:120
Saturday 10 August 2024 12:48:05 -0400 (0:00:01.198) 0:01:29.403 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"cat",
"/run/out"
],
"delta": "0:00:00.002572",
"end": "2024-08-10 12:48:05.822946",
"rc": 0,
"start": "2024-08-10 12:48:05.820374"
}
STDOUT:
WordPress › Installation
WordPress
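The two web tasks above fetch https://localhost:8000 (the port published by the envoy proxy) into /run/out and print its title. A rough curl equivalent, assuming the same self-signed certificate so verification is skipped with -k, would be:
    # -k skips certificate verification; grep pulls out the page title
    curl -ks https://localhost:8000/ -o /run/out
    grep -o '<title>[^<]*</title>' /run/out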
TASK [Error] *******************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:125
Saturday 10 August 2024 12:48:05 -0400 (0:00:00.368) 0:01:29.772 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [Check] *******************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:136
Saturday 10 August 2024 12:48:05 -0400 (0:00:00.032) 0:01:29.804 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.042091",
"end": "2024-08-10 12:48:06.259021",
"rc": 0,
"start": "2024-08-10 12:48:06.216930"
}
STDOUT:
CONTAINER ID  IMAGE                                            COMMAND               CREATED         STATUS                   PORTS                                           NAMES
77ecf6ddf9f0  quay.io/linux-system-roles/mysql:5.6             mysqld                38 seconds ago  Up 38 seconds (healthy)                                                  quadlet-demo-mysql
50025198fd7a  localhost/podman-pause:4.9.4-dev-1708535009                            5 seconds ago   Up 5 seconds                                                             a96f3a51b8d1-service
98ced3336e84  localhost/podman-pause:4.9.4-dev-1708535009                            5 seconds ago   Up 5 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  1ed91f1b3155-infra
2c53e6e47092  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  5 seconds ago   Up 5 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-wordpress
37c04bc75a0e  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  5 seconds ago   Up 5 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-envoy
TASK [Check pods] **************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:140
Saturday 10 August 2024 12:48:06 -0400 (0:00:00.401) 0:01:30.206 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.041445",
"end": "2024-08-10 12:48:06.661040",
"failed_when_result": false,
"rc": 0,
"start": "2024-08-10 12:48:06.619595"
}
STDOUT:
POD ID        NAME          STATUS   CREATED        INFRA ID      IDS                                     NAMES                                                         STATUS
1ed91f1b3155  quadlet-demo  Running  5 seconds ago  98ced3336e84  98ced3336e84,2c53e6e47092,37c04bc75a0e  1ed91f1b3155-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:145
Saturday 10 August 2024 12:48:06 -0400 (0:00:00.401) 0:01:30.608 *******
ok: [managed_node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet",
"delta": "0:00:00.011020",
"end": "2024-08-10 12:48:07.032801",
"failed_when_result": false,
"rc": 0,
"start": "2024-08-10 12:48:07.021781"
}
STDOUT:
quadlet-demo-mysql-volume.service  loaded active exited   quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service         loaded active running  quadlet-demo-mysql.service
quadlet-demo-network.service       loaded active exited   quadlet-demo-network.service
quadlet-demo.service               loaded active running  quadlet-demo.service
TASK [LS] **********************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:153
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.372) 0:01:30.980 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/systemd/system"
],
"delta": "0:00:00.003725",
"end": "2024-08-10 12:48:07.401493",
"failed_when_result": false,
"rc": 0,
"start": "2024-08-10 12:48:07.397768"
}
STDOUT:
total 12
lrwxrwxrwx. 1 root root 9 May 11 2019 systemd-timedated.service -> /dev/null
drwxr-xr-x. 4 root root 169 May 29 03:45 ../
lrwxrwxrwx. 1 root root 39 May 29 03:45 syslog.service -> /usr/lib/systemd/system/rsyslog.service
drwxr-xr-x. 2 root root 32 May 29 03:45 getty.target.wants/
lrwxrwxrwx. 1 root root 37 May 29 03:45 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
lrwxrwxrwx. 1 root root 57 May 29 03:45 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x. 2 root root 48 May 29 03:45 network-online.target.wants/
lrwxrwxrwx. 1 root root 41 May 29 03:45 dbus-org.freedesktop.timedate1.service -> /usr/lib/systemd/system/timedatex.service
drwxr-xr-x. 2 root root 61 May 29 03:45 timers.target.wants/
drwxr-xr-x. 2 root root 31 May 29 03:45 basic.target.wants/
drwxr-xr-x. 2 root root 38 May 29 03:45 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
lrwxrwxrwx. 1 root root 41 May 29 03:47 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x. 2 root root 51 May 29 03:55 sockets.target.wants/
drwxr-xr-x. 2 root root 31 May 29 03:55 remote-fs.target.wants/
drwxr-xr-x. 2 root root 59 May 29 03:55 sshd-keygen@.service.d/
drwxr-xr-x. 2 root root 119 May 29 03:55 cloud-init.target.wants/
drwxr-xr-x. 2 root root 181 May 29 03:56 sysinit.target.wants/
drwxr-xr-x. 2 root root 4096 Aug 10 12:34 multi-user.target.wants/
lrwxrwxrwx. 1 root root 41 Aug 10 12:34 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
-rw-r--r--. 1 root root 99 Aug 10 12:41 user-0.slice
drwxr-xr-x. 2 root root 27 Aug 10 12:41 user@.service.d/
drwxr-xr-x. 15 root root 4096 Aug 10 12:41 ./
drwxr-xr-x. 2 root root 27 Aug 10 12:41 user-.slice.d/
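No quadlet-related units appear under /etc/systemd/system because quadlet units are generated at runtime; per the FragmentPath seen earlier they live under /run/systemd/generator. A manual confirmation, if wanted, could be:
    # generated units are written to the runtime generator directory
    ls /run/systemd/generator/ | grep quadlet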
TASK [Cleanup] *****************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:160
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.368) 0:01:31.349 *******
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.099) 0:01:31.449 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.061) 0:01:31.510 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.041) 0:01:31.551 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.033) 0:01:31.585 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:20
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.034) 0:01:31.619 *******
ok: [managed_node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed_node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed_node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
ok: [managed_node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 10 August 2024 12:48:07 -0400 (0:00:00.086) 0:01:31.705 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 10 August 2024 12:48:09 -0400 (0:00:01.519) 0:01:33.225 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 10 August 2024 12:48:09 -0400 (0:00:00.063) 0:01:33.288 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:22
Saturday 10 August 2024 12:48:09 -0400 (0:00:00.040) 0:01:33.328 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.028737",
"end": "2024-08-10 12:48:09.776680",
"rc": 0,
"start": "2024-08-10 12:48:09.747943"
}
STDOUT:
podman version 4.9.4-dev
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 10 August 2024 12:48:09 -0400 (0:00:00.397) 0:01:33.726 *******
ok: [managed_node1] => {
"ansible_facts": {
"podman_version": "4.9.4-dev"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:32
Saturday 10 August 2024 12:48:09 -0400 (0:00:00.035) 0:01:33.761 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:39
Saturday 10 August 2024 12:48:09 -0400 (0:00:00.031) 0:01:33.793 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
META: end_host conditional evaluated to false, continuing execution for managed_node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 10 August 2024 12:48:09 -0400 (0:00:00.063) 0:01:33.856 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:48:10 -0400 (0:00:00.069) 0:01:33.926 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:48:10 -0400 (0:00:00.040) 0:01:33.967 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:48:10 -0400 (0:00:00.040) 0:01:34.007 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:48:10 -0400 (0:00:00.044) 0:01:34.051 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:48:10 -0400 (0:00:00.386) 0:01:34.438 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:48:10 -0400 (0:00:00.042) 0:01:34.480 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
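getsubids is only consulted for rootless users; this run manages everything as root, so the subuid/subgid checks that follow are skipped. For a rootless setup, the equivalent manual query (with a hypothetical user named poduser) would be:
    # print the subordinate UID and GID ranges allocated to the user
    getsubids poduser
    getsubids -g poduser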
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.404) 0:01:34.885 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.033) 0:01:34.919 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.034) 0:01:34.953 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.034) 0:01:34.988 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.020 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.053 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.086 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.119 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:62
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.151 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:71
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.043) 0:01:35.194 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.066) 0:01:35.261 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.033) 0:01:35.294 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:74
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.326 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.067) 0:01:35.394 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.427 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:77
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.460 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.100) 0:01:35.560 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.033) 0:01:35.594 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.033) 0:01:35.627 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.071) 0:01:35.698 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.731 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.033) 0:01:35.765 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.033) 0:01:35.798 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:86
Saturday 10 August 2024 12:48:11 -0400 (0:00:00.032) 0:01:35.830 *******
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Saturday 10 August 2024 12:48:12 -0400 (0:00:00.104) 0:01:35.935 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed_node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 10 August 2024 12:48:12 -0400 (0:00:00.062) 0:01:35.997 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 10 August 2024 12:48:12 -0400 (0:00:00.041) 0:01:36.039 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 10 August 2024 12:48:12 -0400 (0:00:00.032) 0:01:36.072 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Saturday 10 August 2024 12:48:12 -0400 (0:00:00.033) 0:01:36.106 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Saturday 10 August 2024 12:48:12 -0400 (0:00:00.033) 0:01:36.140 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Saturday 10 August 2024 12:48:12 -0400 (0:00:00.068) 0:01:36.208 *******
ok: [managed_node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: firewalld
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Saturday 10 August 2024 12:48:15 -0400 (0:00:03.088) 0:01:39.296 *******
skipping: [managed_node1] => {}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Saturday 10 August 2024 12:48:15 -0400 (0:00:00.033) 0:01:39.329 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Saturday 10 August 2024 12:48:15 -0400 (0:00:00.033) 0:01:39.362 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Saturday 10 August 2024 12:48:15 -0400 (0:00:00.031) 0:01:39.394 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Saturday 10 August 2024 12:48:15 -0400 (0:00:00.057) 0:01:39.451 *******
skipping: [managed_node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"item": "ufw",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Saturday 10 August 2024 12:48:15 -0400 (0:00:00.044) 0:01:39.496 *******
ok: [managed_node1] => {
"changed": false,
"name": "firewalld",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"ActiveEnterTimestampMonotonic": "6667579",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "basic.target polkit.service sysinit.target system.slice dbus.socket dbus.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"AssertTimestampMonotonic": "5273975",
"Before": "network-pre.target multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ConditionTimestampMonotonic": "5273973",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ip6tables.service iptables.service nftables.service ebtables.service ipset.service shutdown.target",
"ControlGroup": "/system.slice/firewalld.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "582",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ExecMainStartTimestampMonotonic": "5276069",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"InactiveExitTimestampMonotonic": "5276133",
"InvocationID": "cdc8f49a1be047faa5ce1c8fd76ca6b9",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "582",
"MemoryAccounting": "yes",
"MemoryCurrent": "50790400",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"StateChangeTimestampMonotonic": "6667579",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"WatchdogTimestampMonotonic": "6667576",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Saturday 10 August 2024 12:48:16 -0400 (0:00:00.513) 0:01:40.010 *******
ok: [managed_node1] => {
"changed": false,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"ActiveEnterTimestampMonotonic": "6667579",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "basic.target polkit.service sysinit.target system.slice dbus.socket dbus.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"AssertTimestampMonotonic": "5273975",
"Before": "network-pre.target multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ConditionTimestampMonotonic": "5273973",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ip6tables.service iptables.service nftables.service ebtables.service ipset.service shutdown.target",
"ControlGroup": "/system.slice/firewalld.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "582",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"ExecMainStartTimestampMonotonic": "5276069",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:42:20 EDT",
"InactiveExitTimestampMonotonic": "5276133",
"InvocationID": "cdc8f49a1be047faa5ce1c8fd76ca6b9",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "582",
"MemoryAccounting": "yes",
"MemoryCurrent": "50790400",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"StateChangeTimestampMonotonic": "6667579",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogTimestamp": "Sat 2024-08-10 12:42:21 EDT",
"WatchdogTimestampMonotonic": "6667576",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Saturday 10 August 2024 12:48:16 -0400 (0:00:00.504) 0:01:40.514 *******
ok: [managed_node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/libexec/platform-python",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Saturday 10 August 2024 12:48:16 -0400 (0:00:00.039) 0:01:40.553 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Saturday 10 August 2024 12:48:16 -0400 (0:00:00.031) 0:01:40.585 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Saturday 10 August 2024 12:48:16 -0400 (0:00:00.033) 0:01:40.618 *******
ok: [managed_node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
ok: [managed_node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
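The two enabled ports above come straight from the role's input. A minimal invocation sketch that would produce exactly these loop items, assuming the role's documented firewall list variable (the task name and play context below are illustrative, not taken from this run):

    - name: Open the quadlet demo ports
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
          - port: 9000/tcp
            state: enabled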
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Saturday 10 August 2024 12:48:17 -0400 (0:00:01.200) 0:01:41.818 *******
skipping: [managed_node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed_node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 10 August 2024 12:48:17 -0400 (0:00:00.053) 0:01:41.872 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.063) 0:01:41.936 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.034) 0:01:41.971 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.034) 0:01:42.006 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.032) 0:01:42.038 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.032) 0:01:42.071 *******
skipping: [managed_node1] => {}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:93
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.031) 0:01:42.102 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:100
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.032) 0:01:42.135 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:104
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.032) 0:01:42.167 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:113
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.031) 0:01:42.198 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:122
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.030) 0:01:42.229 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.101) 0:01:42.331 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.035) 0:01:42.366 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.042) 0:01:42.409 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.052) 0:01:42.462 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.032) 0:01:42.494 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.032) 0:01:42.527 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.032) 0:01:42.559 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Saturday 10 August 2024 12:48:18 -0400 (0:00:00.064) 0:01:42.623 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
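The result is censored by no_log, and the preceding warning is expected, presumably because the role passes each secret specification as the task's args. A minimal sketch of the kind of entry consumed here, assuming the role's podman_secrets list variable and the containers.podman.podman_secret parameters name/data/state (the secret name and vault reference below are placeholders, not values from this run):

    podman_secrets:
      - name: example-secret                  # placeholder name
        data: "{{ example_vault_password }}"  # placeholder vault-encrypted value
        state: present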
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.415) 0:01:43.038 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.034) 0:01:43.073 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.042) 0:01:43.116 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.053) 0:01:43.170 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.033) 0:01:43.203 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.032) 0:01:43.236 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.032) 0:01:43.268 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.032) 0:01:43.300 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.415) 0:01:43.716 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.034) 0:01:43.751 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.044) 0:01:43.796 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:48:19 -0400 (0:00:00.053) 0:01:43.850 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.063) 0:01:43.913 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.033) 0:01:43.946 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.032) 0:01:43.979 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.032) 0:01:44.011 *******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.426) 0:01:44.438 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:136
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.032) 0:01:44.470 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.158) 0:01:44.628 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
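For readability, the escaped __podman_quadlet_str above is the quadlet-demo.kube unit below, reproduced verbatim with the \n escapes expanded:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml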
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.045) 0:01:44.674 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.041) 0:01:44.716 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.034) 0:01:44.750 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.047) 0:01:44.798 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:48:20 -0400 (0:00:00.064) 0:01:44.862 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:48:21 -0400 (0:00:00.039) 0:01:44.902 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:48:21 -0400 (0:00:00.075) 0:01:44.977 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:48:21 -0400 (0:00:00.045) 0:01:45.023 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:48:21 -0400 (0:00:00.381) 0:01:45.405 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:48:21 -0400 (0:00:00.042) 0:01:45.447 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:48:21 -0400 (0:00:00.374) 0:01:45.821 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:48:21 -0400 (0:00:00.034) 0:01:45.856 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.035) 0:01:45.891 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.032) 0:01:45.924 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.033) 0:01:45.957 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.035) 0:01:45.992 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.033) 0:01:46.026 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.034) 0:01:46.060 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.033) 0:01:46.094 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.063) 0:01:46.157 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.036) 0:01:46.194 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.031) 0:01:46.225 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.111) 0:01:46.337 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.038) 0:01:46.376 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.081) 0:01:46.458 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 10 August 2024 12:48:22 -0400 (0:00:00.032) 0:01:46.490 *******
changed: [managed_node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo.service",
"state": "stopped",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:48:01 EDT",
"ActiveEnterTimestampMonotonic": "346856163",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "systemd-journald.socket quadlet-demo-mysql.service quadlet-demo-network.service sysinit.target basic.target -.mount system.slice",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:48:00 EDT",
"AssertTimestampMonotonic": "345522765",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:48:00 EDT",
"ConditionTimestampMonotonic": "345522764",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "60066",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:48:01 EDT",
"ExecMainStartTimestampMonotonic": "346856137",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network=systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2024-08-10 12:48:00 EDT] ; stop_time=[n/a] ; pid=59977 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:48:00 EDT",
"InactiveExitTimestampMonotonic": "345524026",
"InvocationID": "09e889c618b04ea4b457320cde6ac0e1",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "60066",
"MemoryAccounting": "yes",
"MemoryCurrent": "6184960",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice quadlet-demo-mysql.service sysinit.target quadlet-demo-network.service -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:48:01 EDT",
"StateChangeTimestampMonotonic": "346856163",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "4",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestamp": "Sat 2024-08-10 12:48:01 EDT",
"WatchdogTimestampMonotonic": "346856160",
"WatchdogUSec": "0"
}
}
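The unit being stopped here is the quadlet-generated quadlet-demo.service; its lifecycle commands are visible in the ExecStart and ExecStopPost properties above. Extracted verbatim from those argv entries, the roughly equivalent manual commands for debugging would be:

    /usr/bin/podman kube play --replace --service-container=true \
        --network=systemd-quadlet-demo \
        --configmap /etc/containers/systemd/envoy-proxy-configmap.yml \
        --publish 8000:8080 --publish 9000:9901 \
        /etc/containers/systemd/quadlet-demo.yml
    /usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml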
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 10 August 2024 12:48:23 -0400 (0:00:01.290) 0:01:47.781 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723308479.8209777,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"ctime": 1723308479.2069716,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 18875426,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1723308478.8729687,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 456,
"uid": 0,
"version": "4014614056",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 10 August 2024 12:48:24 -0400 (0:00:00.393) 0:01:48.174 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 10 August 2024 12:48:24 -0400 (0:00:00.062) 0:01:48.236 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 10 August 2024 12:48:24 -0400 (0:00:00.363) 0:01:48.599 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 10 August 2024 12:48:24 -0400 (0:00:00.079) 0:01:48.679 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 10 August 2024 12:48:24 -0400 (0:00:00.033) 0:01:48.713 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 10 August 2024 12:48:24 -0400 (0:00:00.031) 0:01:48.744 *******
changed: [managed_node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"state": "absent"
}
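The change above is a plain deletion of the quadlet unit source file. A minimal sketch of an equivalent standalone task, assuming nothing beyond ansible.builtin.file (the role's actual task lives at cleanup_quadlet_spec.yml:42):

    - name: Remove quadlet file
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo.kube
        state: absent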
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:25 -0400 (0:00:00.376) 0:01:49.121 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 10 August 2024 12:48:25 -0400 (0:00:00.637) 0:01:49.758 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:95
Saturday 10 August 2024 12:48:25 -0400 (0:00:00.036) 0:01:49.795 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:112
Saturday 10 August 2024 12:48:25 -0400 (0:00:00.049) 0:01:49.844 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 10 August 2024 12:48:25 -0400 (0:00:00.030) 0:01:49.875 *******
changed: [managed_node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.735624",
"end": "2024-08-10 12:48:27.043796",
"rc": 0,
"start": "2024-08-10 12:48:26.308172"
}
STDOUT:
fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d
2ce87255eb4338f0b07f3cdf5e5badcec432875e7e760929a320bf2960b56933
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:127
Saturday 10 August 2024 12:48:27 -0400 (0:00:01.116) 0:01:50.992 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:48:27 -0400 (0:00:00.088) 0:01:51.081 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:48:27 -0400 (0:00:00.032) 0:01:51.113 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:48:27 -0400 (0:00:00.033) 0:01:51.147 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:137
Saturday 10 August 2024 12:48:27 -0400 (0:00:00.033) 0:01:51.180 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.036964",
"end": "2024-08-10 12:48:27.648678",
"rc": 0,
"start": "2024-08-10 12:48:27.611714"
}
STDOUT:
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 2 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:146
Saturday 10 August 2024 12:48:27 -0400 (0:00:00.416) 0:01:51.597 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.030647",
"end": "2024-08-10 12:48:28.056970",
"rc": 0,
"start": "2024-08-10 12:48:28.026323"
}
STDOUT:
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:155
Saturday 10 August 2024 12:48:28 -0400 (0:00:00.408) 0:01:52.006 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.037602",
"end": "2024-08-10 12:48:28.476210",
"rc": 0,
"start": "2024-08-10 12:48:28.438608"
}
STDOUT:
77ecf6ddf9f0 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:164
Saturday 10 August 2024 12:48:28 -0400 (0:00:00.418) 0:01:52.424 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.059275",
"end": "2024-08-10 12:48:28.926109",
"rc": 0,
"start": "2024-08-10 12:48:28.866834"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:173
Saturday 10 August 2024 12:48:28 -0400 (0:00:00.449) 0:01:52.874 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
Saturday 10 August 2024 12:48:29 -0400 (0:00:00.403) 0:01:53.278 *******
ok: [managed_node1] => {
"ansible_facts": {
"services": {
"77ecf6ddf9f0a56506ba9d...2af32e2713127dc8d66f8b9853e38f9021fc6.service": {
"name": "77ecf6ddf9f0a56506ba9d...2af32e2713127dc8d66f8b9853e38f9021fc6.service",
"source": "systemd",
"state": "inactive",
"status": "transient"
},
"77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service": {
"name": "77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service",
"source": "systemd",
"state": "running",
"status": "unknown"
},
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cni-dhcp.service": {
"name": "cni-dhcp.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnsmasq.service": {
"name": "dnsmasq.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "unknown"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "unknown"
}
}
},
"changed": false
}
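The dictionary above is the output of Ansible's service_facts module, which the role gathers so it can see the quadlet-generated units (note the quadlet-demo-* entries with status "generated"). A minimal, illustrative sketch of gathering and querying such facts outside the role (task names here are placeholders, not the role's own tasks):

    - name: Gather service facts (produces the ansible_facts.services dict shown above)
      service_facts:

    - name: Example lookup against the gathered facts (illustrative)
      debug:
        msg: "quadlet-demo-mysql.service is {{ ansible_facts.services['quadlet-demo-mysql.service'].state }}"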
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:48:31 -0400 (0:00:01.887) 0:01:55.165 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.036) 0:01:55.202 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
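For readability, here is the same content as __podman_quadlet_str with its escape sequences expanded (taken verbatim from the fact above; only the indentation is restored to conventional two-space nesting, since it is collapsed in the escaped string):

    ---
    apiVersion: v1
    kind: PersistentVolumeClaim
    metadata:
      name: wp-pv-claim
      labels:
        app: wordpress
    spec:
      accessModes:
        - ReadWriteOnce
      resources:
        requests:
          storage: 20Gi
    ---
    apiVersion: v1
    kind: Pod
    metadata:
      name: quadlet-demo
    spec:
      containers:
        - name: wordpress
          image: quay.io/linux-system-roles/wordpress:4.8-apache
          env:
            - name: WORDPRESS_DB_HOST
              value: quadlet-demo-mysql
            - name: WORDPRESS_DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: mysql-root-password-kube
                  key: password
          volumeMounts:
            - name: wordpress-persistent-storage
              mountPath: /var/www/html
          resources:
            requests:
              memory: "64Mi"
              cpu: "250m"
            limits:
              memory: "128Mi"
              cpu: "500m"
        - name: envoy
          image: quay.io/linux-system-roles/envoyproxy:v1.25.0
          volumeMounts:
            - name: config-volume
              mountPath: /etc/envoy
            - name: certificates
              mountPath: /etc/envoy-certificates
          env:
            - name: ENVOY_UID
              value: "0"
          resources:
            requests:
              memory: "64Mi"
              cpu: "250m"
            limits:
              memory: "128Mi"
              cpu: "500m"
      volumes:
        - name: config-volume
          configMap:
            name: envoy-proxy-config
        - name: certificates
          secret:
            secretName: envoy-certificates
        - name: wordpress-persistent-storage
          persistentVolumeClaim:
            claimName: wp-pv-claim
        - name: www  # not used - for testing hostpath
          hostPath:
            path: /tmp/httpd3
        - name: create  # not used - for testing hostpath
          hostPath:
            path: /tmp/httpd3-create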
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.117) 0:01:55.320 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
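__podman_state is "absent" here because this pass of the test removes the demo quadlets rather than creating them. A hedged sketch of how a playbook might request that cleanup through the role (the spec keys are inferred from the internal facts above, so treat the exact names as an assumption):

    - hosts: all
      roles:
        - fedora.linux_system_roles.podman
      vars:
        podman_quadlet_specs:
          # illustrative spec; mirrors __podman_quadlet_template_src above
          - template_src: quadlet-demo.yml.j2
            state: absent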
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.044) 0:01:55.364 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.038) 0:01:55.403 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.051) 0:01:55.454 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.068) 0:01:55.522 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.044) 0:01:55.567 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.043) 0:01:55.611 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:48:31 -0400 (0:00:00.110) 0:01:55.722 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
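The getent_group fact above is the result of a group-database lookup for the podman group ("0", i.e. root). A minimal sketch of the kind of call that produces it (looking up by GID here is an assumption; the role may look up by name instead):

    - name: Look up the group for the podman user (illustrative)
      getent:
        database: group
        key: "0"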
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.384) 0:01:56.106 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.047) 0:01:56.153 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
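The stat above only establishes that /usr/bin/getsubids exists; the follow-up checks are skipped because the target user is root. For a rootless user, the role would verify subordinate ID ranges with getsubids, roughly like this (the user and group names are placeholders and the tasks are an illustration, not the role's actual tasks):

    - name: Check subuid allocation for a rootless user (illustrative)
      command: getsubids someuser
      register: __subuid_check
      changed_when: false

    - name: Check subgid allocation for the user's group (illustrative)
      command: getsubids -g somegroup
      register: __subgid_check
      changed_when: false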
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.376) 0:01:56.530 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.037) 0:01:56.568 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.034) 0:01:56.603 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.036) 0:01:56.639 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.035) 0:01:56.675 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.034) 0:01:56.710 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.035) 0:01:56.746 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.036) 0:01:56.782 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:48:32 -0400 (0:00:00.034) 0:01:56.817 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.065) 0:01:56.882 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
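__podman_quadlet_path resolves to /etc/containers/systemd because __podman_rootless is false; for a rootless user, quadlet files would instead live under that user's config directory. A sketch of that path selection, expressed as a set_fact purely for illustration (the rootless location is the standard quadlet search path, not something taken from this log):

    - name: Pick the quadlet directory (illustrative; mirrors the facts above)
      set_fact:
        __quadlet_dir: >-
          {{ '/etc/containers/systemd'
             if not __podman_rootless
             else __podman_user_home_dir ~ '/.config/containers/systemd' }}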
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.072) 0:01:56.955 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.035) 0:01:56.990 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.084) 0:01:57.075 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.042) 0:01:57.118 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.085) 0:01:57.203 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.036) 0:01:57.239 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.037) 0:01:57.277 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723308458.9677727,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"ctime": 1723308456.1917453,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 1295754,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1723308455.7877412,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.yml",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 1605,
"uid": 0,
"version": "3809052853",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.383) 0:01:57.661 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 10 August 2024 12:48:33 -0400 (0:00:00.066) 0:01:57.727 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 10 August 2024 12:48:34 -0400 (0:00:00.366) 0:01:58.094 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 10 August 2024 12:48:34 -0400 (0:00:00.035) 0:01:58.129 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 10 August 2024 12:48:34 -0400 (0:00:00.042) 0:01:58.172 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 10 August 2024 12:48:34 -0400 (0:00:00.071) 0:01:58.243 *******
changed: [managed_node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.yml",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:34 -0400 (0:00:00.376) 0:01:58.620 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 10 August 2024 12:48:35 -0400 (0:00:00.621) 0:01:59.241 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:95
Saturday 10 August 2024 12:48:35 -0400 (0:00:00.039) 0:01:59.281 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:112
Saturday 10 August 2024 12:48:35 -0400 (0:00:00.053) 0:01:59.334 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 10 August 2024 12:48:35 -0400 (0:00:00.035) 0:01:59.370 *******
changed: [managed_node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.033579",
"end": "2024-08-10 12:48:35.832084",
"rc": 0,
"start": "2024-08-10 12:48:35.798505"
}
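Taken together, the last few tasks delete the rendered quadlet file, reload systemd so the generated unit disappears, and prune images that are no longer referenced. A stand-alone sketch of that sequence in plain Ansible (path and command taken from the log above; this is an illustration, not the role's task file):

    - name: Remove the quadlet unit file
      file:
        path: /etc/containers/systemd/quadlet-demo.yml
        state: absent

    - name: Reload systemd so the generated unit is dropped
      systemd:
        daemon_reload: true

    - name: Prune container images that are no longer in use
      command: podman image prune --all -f
      register: __prune
      changed_when: __prune.stdout != ''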
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:127
Saturday 10 August 2024 12:48:35 -0400 (0:00:00.412) 0:01:59.783 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:48:35 -0400 (0:00:00.065) 0:01:59.848 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:48:36 -0400 (0:00:00.034) 0:01:59.883 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:48:36 -0400 (0:00:00.035) 0:01:59.918 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:137
Saturday 10 August 2024 12:48:36 -0400 (0:00:00.068) 0:01:59.987 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.036210",
"end": "2024-08-10 12:48:36.451616",
"rc": 0,
"start": "2024-08-10 12:48:36.415406"
}
STDOUT:
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 2 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:146
Saturday 10 August 2024 12:48:36 -0400 (0:00:00.415) 0:02:00.403 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.034552",
"end": "2024-08-10 12:48:36.866694",
"rc": 0,
"start": "2024-08-10 12:48:36.832142"
}
STDOUT:
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:155
Saturday 10 August 2024 12:48:36 -0400 (0:00:00.414) 0:02:00.818 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.039169",
"end": "2024-08-10 12:48:37.287965",
"rc": 0,
"start": "2024-08-10 12:48:37.248796"
}
STDOUT:
77ecf6ddf9f0 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:164
Saturday 10 August 2024 12:48:37 -0400 (0:00:00.421) 0:02:01.240 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.059670",
"end": "2024-08-10 12:48:37.727456",
"rc": 0,
"start": "2024-08-10 12:48:37.667786"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:173
Saturday 10 August 2024 12:48:37 -0400 (0:00:00.439) 0:02:01.679 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
Saturday 10 August 2024 12:48:38 -0400 (0:00:00.410) 0:02:02.089 *******
ok: [managed_node1] => {
"ansible_facts": {
"services": {
"77ecf6ddf9f0a56506ba9d...2af32e2713127dc8d66f8b9853e38f9021fc6.service": {
"name": "77ecf6ddf9f0a56506ba9d...2af32e2713127dc8d66f8b9853e38f9021fc6.service",
"source": "systemd",
"state": "inactive",
"status": "transient"
},
"77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service": {
"name": "77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cni-dhcp.service": {
"name": "cni-dhcp.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnsmasq.service": {
"name": "dnsmasq.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "unknown"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "unknown"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:48:39 -0400 (0:00:01.769) 0:02:03.859 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.036) 0:02:03.895 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
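For readability, the escaped __podman_quadlet_str in the result above decodes to roughly the following envoy-proxy-configmap.yml content. The keys and values are taken verbatim from the string; only the YAML nesting is reconstructed here, since the log rendering collapses the original indentation.
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: envoy-proxy-config
data:
  envoy.yaml: |
    admin:
      address:
        socket_address:
          address: 0.0.0.0
          port_value: 9901

    static_resources:
      listeners:
        - name: listener_0
          address:
            socket_address:
              address: 0.0.0.0
              port_value: 8080
          filter_chains:
            - filters:
                - name: envoy.filters.network.http_connection_manager
                  typed_config:
                    "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
                    stat_prefix: ingress_http
                    codec_type: AUTO
                    route_config:
                      name: local_route
                      virtual_hosts:
                        - name: local_service
                          domains: ["*"]
                          routes:
                            - match:
                                prefix: "/"
                              route:
                                cluster: backend
                    http_filters:
                      - name: envoy.filters.http.router
                        typed_config:
                          "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router
              transport_socket:
                name: envoy.transport_sockets.tls
                typed_config:
                  "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext
                  common_tls_context:
                    tls_certificates:
                      - certificate_chain:
                          filename: /etc/envoy-certificates/certificate.pem
                        private_key:
                          filename: /etc/envoy-certificates/certificate.key
      clusters:
        - name: backend
          connect_timeout: 5s
          type: STATIC
          dns_refresh_rate: 1800s
          lb_policy: ROUND_ROBIN
          load_assignment:
            cluster_name: backend
            endpoints:
              - lb_endpoints:
                  - endpoint:
                      address:
                        socket_address:
                          address: 127.0.0.1
                          port_value: 80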
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.049) 0:02:03.945 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.043) 0:02:03.989 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.037) 0:02:04.026 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.050) 0:02:04.077 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.068) 0:02:04.145 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.044) 0:02:04.190 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.042) 0:02:04.233 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.050) 0:02:04.283 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.385) 0:02:04.669 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:48:40 -0400 (0:00:00.045) 0:02:04.714 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.422) 0:02:05.137 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.037) 0:02:05.174 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.035) 0:02:05.210 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.036) 0:02:05.246 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.037) 0:02:05.283 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.035) 0:02:05.318 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.037) 0:02:05.356 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.036) 0:02:05.392 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.035) 0:02:05.427 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.064) 0:02:05.491 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.038) 0:02:05.530 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.034) 0:02:05.564 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.083) 0:02:05.648 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.042) 0:02:05.690 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.083) 0:02:05.773 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 10 August 2024 12:48:41 -0400 (0:00:00.071) 0:02:05.845 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 10 August 2024 12:48:42 -0400 (0:00:00.039) 0:02:05.884 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723308480.7809868,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"ctime": 1723308451.6457002,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 501219494,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1723308451.333697,
"nlink": 1,
"path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 2102,
"uid": 0,
"version": "1348679410",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 10 August 2024 12:48:42 -0400 (0:00:00.392) 0:02:06.277 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 10 August 2024 12:48:42 -0400 (0:00:00.067) 0:02:06.344 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 10 August 2024 12:48:42 -0400 (0:00:00.368) 0:02:06.713 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 10 August 2024 12:48:42 -0400 (0:00:00.036) 0:02:06.749 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 10 August 2024 12:48:42 -0400 (0:00:00.041) 0:02:06.791 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 10 August 2024 12:48:42 -0400 (0:00:00.035) 0:02:06.826 *******
changed: [managed_node1] => {
"changed": true,
"path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:43 -0400 (0:00:00.381) 0:02:07.207 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 10 August 2024 12:48:43 -0400 (0:00:00.654) 0:02:07.862 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:95
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.040) 0:02:07.902 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:112
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.056) 0:02:07.959 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.035) 0:02:07.995 *******
changed: [managed_node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.033872",
"end": "2024-08-10 12:48:44.460054",
"rc": 0,
"start": "2024-08-10 12:48:44.426182"
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:127
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.415) 0:02:08.411 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.065) 0:02:08.477 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.036) 0:02:08.513 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.035) 0:02:08.549 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:137
Saturday 10 August 2024 12:48:44 -0400 (0:00:00.035) 0:02:08.584 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.035009",
"end": "2024-08-10 12:48:45.047623",
"rc": 0,
"start": "2024-08-10 12:48:45.012614"
}
STDOUT:
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 2 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:146
Saturday 10 August 2024 12:48:45 -0400 (0:00:00.416) 0:02:09.001 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.033062",
"end": "2024-08-10 12:48:45.463743",
"rc": 0,
"start": "2024-08-10 12:48:45.430681"
}
STDOUT:
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:155
Saturday 10 August 2024 12:48:45 -0400 (0:00:00.414) 0:02:09.415 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.036871",
"end": "2024-08-10 12:48:45.880656",
"rc": 0,
"start": "2024-08-10 12:48:45.843785"
}
STDOUT:
77ecf6ddf9f0 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:164
Saturday 10 August 2024 12:48:45 -0400 (0:00:00.416) 0:02:09.832 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.057209",
"end": "2024-08-10 12:48:46.320065",
"rc": 0,
"start": "2024-08-10 12:48:46.262856"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:173
Saturday 10 August 2024 12:48:46 -0400 (0:00:00.439) 0:02:10.271 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
Saturday 10 August 2024 12:48:46 -0400 (0:00:00.413) 0:02:10.685 *******
ok: [managed_node1] => {
"ansible_facts": {
"services": {
"77ecf6ddf9f0a56506ba9d...2af32e2713127dc8d66f8b9853e38f9021fc6.service": {
"name": "77ecf6ddf9f0a56506ba9d...2af32e2713127dc8d66f8b9853e38f9021fc6.service",
"source": "systemd",
"state": "inactive",
"status": "transient"
},
"77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service": {
"name": "77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cni-dhcp.service": {
"name": "cni-dhcp.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnsmasq.service": {
"name": "dnsmasq.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "unknown"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "unknown"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:48:48 -0400 (0:00:01.696) 0:02:12.381 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:48:48 -0400 (0:00:00.036) 0:02:12.417 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
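For readability, the escaped __podman_quadlet_str above corresponds to the rendered quadlet unit shown below; this is the same content unescaped, as it is written to /etc/containers/systemd/quadlet-demo-mysql.container later in the run:

[Install]
WantedBy=default.target

[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-demo-mysql
Volume=quadlet-demo-mysql.volume:/var/lib/mysql
Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
Network=quadlet-demo.network
Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
HealthCmd=/bin/true
HealthOnFailure=kill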
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:48:48 -0400 (0:00:00.183) 0:02:12.601 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:48:48 -0400 (0:00:00.045) 0:02:12.646 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:48 -0400 (0:00:00.038) 0:02:12.685 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:48:48 -0400 (0:00:00.051) 0:02:12.736 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:48:48 -0400 (0:00:00.068) 0:02:12.805 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:48:48 -0400 (0:00:00.045) 0:02:12.851 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:48:49 -0400 (0:00:00.044) 0:02:12.895 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:48:49 -0400 (0:00:00.048) 0:02:12.943 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:48:49 -0400 (0:00:00.383) 0:02:13.327 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:48:49 -0400 (0:00:00.046) 0:02:13.374 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:48:49 -0400 (0:00:00.372) 0:02:13.747 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:48:49 -0400 (0:00:00.035) 0:02:13.783 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:48:49 -0400 (0:00:00.036) 0:02:13.819 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.071) 0:02:13.891 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.037) 0:02:13.928 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.035) 0:02:13.963 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.036) 0:02:13.999 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.036) 0:02:14.035 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.035) 0:02:14.071 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.066) 0:02:14.137 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.038) 0:02:14.176 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.036) 0:02:14.213 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.084) 0:02:14.297 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.041) 0:02:14.338 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.084) 0:02:14.423 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 10 August 2024 12:48:50 -0400 (0:00:00.035) 0:02:14.458 *******
changed: [managed_node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-mysql.service",
"state": "stopped",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:47:28 EDT",
"ActiveEnterTimestampMonotonic": "313420044",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "system.slice tmp.mount systemd-journald.socket quadlet-demo-mysql-volume.service basic.target -.mount quadlet-demo-network.service sysinit.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:47:27 EDT",
"AssertTimestampMonotonic": "313022834",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:47:27 EDT",
"ConditionTimestampMonotonic": "313022833",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo-mysql.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryNodes": "0",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "56530",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:47:28 EDT",
"ExecMainStartTimestampMonotonic": "313419996",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network=systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:47:27 EDT",
"InactiveExitTimestampMonotonic": "313027724",
"InvocationID": "203e253b8cab439db2f471350b090537",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "56530",
"MemoryAccounting": "yes",
"MemoryCurrent": "604098560",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "-.mount sysinit.target quadlet-demo-network.service quadlet-demo-mysql-volume.service system.slice",
"RequiresMountsFor": "/tmp/quadlet_demo /run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:47:28 EDT",
"StateChangeTimestampMonotonic": "313420044",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "24",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestamp": "Sat 2024-08-10 12:47:28 EDT",
"WatchdogTimestampMonotonic": "313420041",
"WatchdogUSec": "0"
}
}
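Note the UnitFileState "generated" and SourcePath /etc/containers/systemd/quadlet-demo-mysql.container above: the unit is produced by the systemd quadlet generator from that source file, so the tasks that follow remove the source file and refresh systemd to make the generated unit go away. The ExecStart line also shows how the [Container] directives quoted earlier were translated into the podman run invocation; a comment-only summary of the correspondence visible in this log (inferred from this output, not taken from quadlet documentation):

# ContainerName=quadlet-demo-mysql                  -> --name=quadlet-demo-mysql
# Network=quadlet-demo.network                      -> --network=systemd-quadlet-demo
# Volume=quadlet-demo-mysql.volume:/var/lib/mysql   -> -v systemd-quadlet-demo-mysql:/var/lib/mysql
# Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z  -> -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z
# Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD -> --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
# HealthCmd=/bin/true                               -> --health-cmd /bin/true
# HealthOnFailure=kill                              -> --health-on-failure kill
# Image=quay.io/linux-system-roles/mysql:5.6        -> trailing image argument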
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 10 August 2024 12:48:53 -0400 (0:00:02.483) 0:02:16.941 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723308447.3556578,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"ctime": 1723308446.761652,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 421527722,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1723308446.4946494,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo-mysql.container",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 363,
"uid": 0,
"version": "168323515",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 10 August 2024 12:48:53 -0400 (0:00:00.382) 0:02:17.324 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 10 August 2024 12:48:53 -0400 (0:00:00.067) 0:02:17.391 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 10 August 2024 12:48:53 -0400 (0:00:00.369) 0:02:17.760 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 10 August 2024 12:48:53 -0400 (0:00:00.053) 0:02:17.814 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 10 August 2024 12:48:53 -0400 (0:00:00.066) 0:02:17.881 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 10 August 2024 12:48:54 -0400 (0:00:00.036) 0:02:17.917 *******
changed: [managed_node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo-mysql.container",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 10 August 2024 12:48:54 -0400 (0:00:00.378) 0:02:18.296 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 10 August 2024 12:48:55 -0400 (0:00:00.630) 0:02:18.926 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:95
Saturday 10 August 2024 12:48:55 -0400 (0:00:00.431) 0:02:19.358 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:112
Saturday 10 August 2024 12:48:55 -0400 (0:00:00.052) 0:02:19.411 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 10 August 2024 12:48:55 -0400 (0:00:00.035) 0:02:19.446 *******
changed: [managed_node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.276357",
"end": "2024-08-10 12:48:56.153368",
"rc": 0,
"start": "2024-08-10 12:48:55.877011"
}
STDOUT:
dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:127
Saturday 10 August 2024 12:48:56 -0400 (0:00:00.658) 0:02:20.104 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:48:56 -0400 (0:00:00.097) 0:02:20.201 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:48:56 -0400 (0:00:00.036) 0:02:20.238 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:48:56 -0400 (0:00:00.035) 0:02:20.273 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:137
Saturday 10 August 2024 12:48:56 -0400 (0:00:00.036) 0:02:20.309 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.038771",
"end": "2024-08-10 12:48:56.777164",
"rc": 0,
"start": "2024-08-10 12:48:56.738393"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:146
Saturday 10 August 2024 12:48:56 -0400 (0:00:00.419) 0:02:20.729 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.035508",
"end": "2024-08-10 12:48:57.194098",
"rc": 0,
"start": "2024-08-10 12:48:57.158590"
}
STDOUT:
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:155
Saturday 10 August 2024 12:48:57 -0400 (0:00:00.416) 0:02:21.145 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.034437",
"end": "2024-08-10 12:48:57.609016",
"rc": 0,
"start": "2024-08-10 12:48:57.574579"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:164
Saturday 10 August 2024 12:48:57 -0400 (0:00:00.415) 0:02:21.560 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.066251",
"end": "2024-08-10 12:48:58.058148",
"rc": 0,
"start": "2024-08-10 12:48:57.991897"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:173
Saturday 10 August 2024 12:48:58 -0400 (0:00:00.449) 0:02:22.010 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
Saturday 10 August 2024 12:48:58 -0400 (0:00:00.417) 0:02:22.427 *******
ok: [managed_node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cni-dhcp.service": {
"name": "cni-dhcp.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnsmasq.service": {
"name": "dnsmasq.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "unknown"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "unknown"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:49:00 -0400 (0:00:01.744) 0:02:24.172 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.035) 0:02:24.207 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
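The quadlet-demo-mysql.volume source referenced here is the smallest possible quadlet file: just the section header below. The generator derives the podman volume name systemd-quadlet-demo-mysql from the file name, which matches the volume listed by "podman volume ls" earlier in this run. A sketch of the file, assuming it sits in the same /etc/containers/systemd directory as the .container file:

# quadlet-demo-mysql.volume
[Volume]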
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.049) 0:02:24.257 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.046) 0:02:24.303 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.037) 0:02:24.341 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.052) 0:02:24.393 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.070) 0:02:24.464 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.046) 0:02:24.510 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.080) 0:02:24.591 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:49:00 -0400 (0:00:00.049) 0:02:24.640 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.389) 0:02:25.030 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.048) 0:02:25.078 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.382) 0:02:25.460 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.037) 0:02:25.497 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.036) 0:02:25.534 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.034) 0:02:25.569 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.035) 0:02:25.605 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.035) 0:02:25.640 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.034) 0:02:25.675 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.036) 0:02:25.712 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.035) 0:02:25.748 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:49:01 -0400 (0:00:00.063) 0:02:25.811 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:49:02 -0400 (0:00:00.078) 0:02:25.890 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:49:02 -0400 (0:00:00.037) 0:02:25.927 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:49:02 -0400 (0:00:00.085) 0:02:26.013 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:49:02 -0400 (0:00:00.044) 0:02:26.057 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 10 August 2024 12:49:02 -0400 (0:00:00.088) 0:02:26.146 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 10 August 2024 12:49:02 -0400 (0:00:00.035) 0:02:26.182 *******
changed: [managed_node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-mysql-volume.service",
"state": "stopped",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:47:15 EDT",
"ActiveEnterTimestampMonotonic": "300251794",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "system.slice -.mount sysinit.target basic.target systemd-journald.socket",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:47:15 EDT",
"AssertTimestampMonotonic": "300202981",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:47:15 EDT",
"ConditionTimestampMonotonic": "300202980",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo-mysql-volume.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"ExecMainCode": "1",
"ExecMainExitTimestamp": "Sat 2024-08-10 12:47:15 EDT",
"ExecMainExitTimestampMonotonic": "300251555",
"ExecMainPID": "55138",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:47:15 EDT",
"ExecMainStartTimestampMonotonic": "300203980",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:47:15 EDT",
"InactiveExitTimestampMonotonic": "300204051",
"InvocationID": "c9b9a601e1d04cb0b9ef3f3627e4b188",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "0",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "system.slice -.mount sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:47:15 EDT",
"StateChangeTimestampMonotonic": "300251794",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "exited",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "0",
"TasksMax": "22405",
"TimeoutStartUSec": "infinity",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
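(The unit stopped here is not a hand-written service but the one quadlet generated from the .volume file: note the FragmentPath under /run/systemd/generator, Type=oneshot, RemainAfterExit=yes, and an ExecStart that runs "podman volume create --ignore systemd-quadlet-demo-mysql". Based on those properties, a rough manual equivalent of this task would be:)

    # Approximate manual equivalent of the task above; the unit is generated,
    # so there is no unit file on disk to disable.
    systemctl stop quadlet-demo-mysql-volume.service
    systemctl status quadlet-demo-mysql-volume.service   # should now report inactive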
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 10 August 2024 12:49:02 -0400 (0:00:00.688) 0:02:26.870 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723308434.5085306,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"ctime": 1723308433.8835244,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 266338510,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1723308433.614522,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 9,
"uid": 0,
"version": "140388188",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 10 August 2024 12:49:03 -0400 (0:00:00.393) 0:02:27.264 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 10 August 2024 12:49:03 -0400 (0:00:00.069) 0:02:27.333 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 10 August 2024 12:49:03 -0400 (0:00:00.404) 0:02:27.737 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 10 August 2024 12:49:03 -0400 (0:00:00.053) 0:02:27.791 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 10 August 2024 12:49:03 -0400 (0:00:00.037) 0:02:27.829 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 10 August 2024 12:49:03 -0400 (0:00:00.034) 0:02:27.863 *******
changed: [managed_node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 10 August 2024 12:49:04 -0400 (0:00:00.386) 0:02:28.249 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 10 August 2024 12:49:04 -0400 (0:00:00.624) 0:02:28.874 *******
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:95
Saturday 10 August 2024 12:49:05 -0400 (0:00:00.455) 0:02:29.329 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:112
Saturday 10 August 2024 12:49:05 -0400 (0:00:00.053) 0:02:29.383 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 10 August 2024 12:49:05 -0400 (0:00:00.035) 0:02:29.418 *******
changed: [managed_node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.036988",
"end": "2024-08-10 12:49:05.887051",
"rc": 0,
"start": "2024-08-10 12:49:05.850063"
}
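(Taken together, the cleanup tasks above, i.e. stop the generated service, remove the quadlet file, refresh systemd, remove the managed resource, and prune images, amount to roughly the shell sequence below. This is only a sketch: the "Remove managed resource" output is censored by no_log, but for a .volume quadlet it presumably removes the volume that the generated unit created.)

    # Illustrative shell equivalent of the cleanup sequence logged above
    systemctl stop quadlet-demo-mysql-volume.service
    rm /etc/containers/systemd/quadlet-demo-mysql.volume
    systemctl daemon-reload
    podman volume rm systemd-quadlet-demo-mysql   # assumed; actual task output is censored
    podman image prune --all -f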
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:127
Saturday 10 August 2024 12:49:05 -0400 (0:00:00.452) 0:02:29.870 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:49:06 -0400 (0:00:00.067) 0:02:29.937 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:49:06 -0400 (0:00:00.036) 0:02:29.974 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:49:06 -0400 (0:00:00.036) 0:02:30.010 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:137
Saturday 10 August 2024 12:49:06 -0400 (0:00:00.035) 0:02:30.046 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.034878",
"end": "2024-08-10 12:49:06.512095",
"rc": 0,
"start": "2024-08-10 12:49:06.477217"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:146
Saturday 10 August 2024 12:49:06 -0400 (0:00:00.417) 0:02:30.463 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.037753",
"end": "2024-08-10 12:49:06.934083",
"rc": 0,
"start": "2024-08-10 12:49:06.896330"
}
STDOUT:
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:155
Saturday 10 August 2024 12:49:07 -0400 (0:00:00.422) 0:02:30.886 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.038194",
"end": "2024-08-10 12:49:07.359106",
"rc": 0,
"start": "2024-08-10 12:49:07.320912"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:164
Saturday 10 August 2024 12:49:07 -0400 (0:00:00.425) 0:02:31.312 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.064827",
"end": "2024-08-10 12:49:07.807133",
"rc": 0,
"start": "2024-08-10 12:49:07.742306"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:173
Saturday 10 August 2024 12:49:07 -0400 (0:00:00.446) 0:02:31.759 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
Saturday 10 August 2024 12:49:08 -0400 (0:00:00.415) 0:02:32.175 *******
ok: [managed_node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cni-dhcp.service": {
"name": "cni-dhcp.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnsmasq.service": {
"name": "dnsmasq.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "unknown"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "unknown"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:49:09 -0400 (0:00:01.683) 0:02:33.858 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.035) 0:02:33.893 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
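(Here __podman_quadlet_str carries the full body of the quadlet-demo.network source, so the network quadlet being cleaned up corresponds to a file like:)

    # /etc/containers/systemd/quadlet-demo.network (content taken from __podman_quadlet_str above)
    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress

(This is the definition behind the systemd-quadlet-demo network listed by "podman network ls" earlier in the log.)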
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.049) 0:02:33.943 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.044) 0:02:33.987 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.038) 0:02:34.026 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.051) 0:02:34.077 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.068) 0:02:34.146 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.079) 0:02:34.226 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.044) 0:02:34.271 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get group information] ****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.048) 0:02:34.319 *******
ok: [managed_node1] => {
"ansible_facts": {
"getent_group": {
"root": [
"x",
"0",
""
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set group name] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.384) 0:02:34.704 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_group_name": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Saturday 10 August 2024 12:49:10 -0400 (0:00:00.047) 0:02:34.751 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723307429.1990302,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1723307390.9915028,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986657,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "1934096266",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check user with getsubids] ************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:50
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.380) 0:02:35.131 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check group with getsubids] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:55
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.036) 0:02:35.168 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:60
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.035) 0:02:35.203 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.036) 0:02:35.240 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.036) 0:02:35.276 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:84
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.034) 0:02:35.311 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:94
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.037) 0:02:35.348 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if group not in subgid file] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:101
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.035) 0:02:35.384 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.034) 0:02:35.419 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.141) 0:02:35.560 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.038) 0:02:35.599 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.034) 0:02:35.633 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
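The file and service names above follow the quadlet naming convention: a NAME.network file under /etc/containers/systemd generates a NAME-network.service unit, which is why quadlet-demo.network pairs with quadlet-demo-network.service here. As an illustrative check:
# quadlet source file /etc/containers/systemd/quadlet-demo.network
# generates the unit quadlet-demo-network.service
systemctl cat quadlet-demo-network.service   # shows the generated unit (illustrative)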
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:103
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.085) 0:02:35.719 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:110
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.042) 0:02:35.762 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 10 August 2024 12:49:11 -0400 (0:00:00.086) 0:02:35.848 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 10 August 2024 12:49:12 -0400 (0:00:00.035) 0:02:35.884 *******
changed: [managed_node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-network.service",
"state": "stopped",
"status": {
"ActiveEnterTimestamp": "Sat 2024-08-10 12:47:10 EDT",
"ActiveEnterTimestampMonotonic": "295819937",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "basic.target system.slice -.mount sysinit.target systemd-journald.socket",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Sat 2024-08-10 12:47:10 EDT",
"AssertTimestampMonotonic": "295760743",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Sat 2024-08-10 12:47:10 EDT",
"ConditionTimestampMonotonic": "295760741",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo-network.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"ExecMainCode": "1",
"ExecMainExitTimestamp": "Sat 2024-08-10 12:47:10 EDT",
"ExecMainExitTimestampMonotonic": "295819688",
"ExecMainPID": "54229",
"ExecMainStartTimestamp": "Sat 2024-08-10 12:47:10 EDT",
"ExecMainStartTimestampMonotonic": "295761673",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet=192.168.30.0/24 --gateway=192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sat 2024-08-10 12:47:10 EDT",
"InactiveExitTimestampMonotonic": "295761712",
"InvocationID": "0b000a0f0ee24315b3aa759f881648d3",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "0",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "system.slice -.mount sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-08-10 12:47:10 EDT",
"StateChangeTimestampMonotonic": "295819937",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "exited",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "0",
"TasksMax": "22405",
"TimeoutStartUSec": "infinity",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
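The unit stopped above is a oneshot generated from the quadlet .network file; per the ExecStart recorded in the properties, its only action was the podman network create call. Reproducing the stop step by hand would look roughly like this (values taken from the unit properties above):
# stop the generated oneshot unit
systemctl stop quadlet-demo-network.service
# the unit's ExecStart, as recorded above:
/usr/bin/podman network create --ignore --subnet=192.168.30.0/24 --gateway=192.168.30.1 --label app=wordpress systemd-quadlet-demo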
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 10 August 2024 12:49:12 -0400 (0:00:00.667) 0:02:36.552 *******
ok: [managed_node1] => {
"changed": false,
"stat": {
"atime": 1723308430.077487,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"ctime": 1723308429.4514806,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 239075589,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1723308429.0594769,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.network",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 74,
"uid": 0,
"version": "425925242",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 10 August 2024 12:49:13 -0400 (0:00:00.379) 0:02:36.932 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Saturday 10 August 2024 12:49:13 -0400 (0:00:00.066) 0:02:36.998 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Saturday 10 August 2024 12:49:13 -0400 (0:00:00.373) 0:02:37.372 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Saturday 10 August 2024 12:49:13 -0400 (0:00:00.053) 0:02:37.425 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Saturday 10 August 2024 12:49:13 -0400 (0:00:00.036) 0:02:37.462 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 10 August 2024 12:49:13 -0400 (0:00:00.035) 0:02:37.498 *******
changed: [managed_node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.network",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 10 August 2024 12:49:13 -0400 (0:00:00.377) 0:02:37.875 *******
ok: [managed_node1] => {
"changed": false,
"name": null,
"status": {}
}
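Removing the quadlet source file and refreshing systemd, as the two tasks above do, corresponds roughly to the following manual steps (a sketch; the daemon-reload is what drops the generated quadlet-demo-network.service):
rm /etc/containers/systemd/quadlet-demo.network
systemctl daemon-reload   # regenerate units; the quadlet-generated service disappears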
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 10 August 2024 12:49:14 -0400 (0:00:00.615) 0:02:38.491 *******
changed: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:95
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.478) 0:02:38.969 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:112
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.052) 0:02:39.022 *******
ok: [managed_node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.034) 0:02:39.056 *******
changed: [managed_node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.040355",
"end": "2024-08-10 12:49:15.529111",
"rc": 0,
"start": "2024-08-10 12:49:15.488756"
}
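The prune command above is run verbatim by the role during cleanup; by hand it would be:
# remove all unused images (--all) without prompting (-f)
podman image prune --all -f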
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:127
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.454) 0:02:39.510 *******
included: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed_node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.067) 0:02:39.577 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.035) 0:02:39.613 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.036) 0:02:39.650 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:137
Saturday 10 August 2024 12:49:15 -0400 (0:00:00.036) 0:02:39.686 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.036384",
"end": "2024-08-10 12:49:16.153047",
"rc": 0,
"start": "2024-08-10 12:49:16.116663"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:146
Saturday 10 August 2024 12:49:16 -0400 (0:00:00.418) 0:02:40.104 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.038953",
"end": "2024-08-10 12:49:16.573984",
"rc": 0,
"start": "2024-08-10 12:49:16.535031"
}
STDOUT:
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:155
Saturday 10 August 2024 12:49:16 -0400 (0:00:00.421) 0:02:40.526 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.038229",
"end": "2024-08-10 12:49:16.993176",
"rc": 0,
"start": "2024-08-10 12:49:16.954947"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:164
Saturday 10 August 2024 12:49:17 -0400 (0:00:00.420) 0:02:40.946 *******
ok: [managed_node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.055910",
"end": "2024-08-10 12:49:17.435071",
"rc": 0,
"start": "2024-08-10 12:49:17.379161"
}
STDOUT:
podman
podman-default-kube-network
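The four debugging tasks above simply list what is left on the host; run by hand they are:
podman images -n            # empty here: all images were pruned
podman volume ls -n         # still shows wp-pv-claim, envoy-proxy-config, envoy-certificates
podman ps --noheading       # empty here: no containers remain
podman network ls -n -q     # only podman and podman-default-kube-network remain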
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:173
Saturday 10 August 2024 12:49:17 -0400 (0:00:00.440) 0:02:41.386 *******
ok: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
Saturday 10 August 2024 12:49:17 -0400 (0:00:00.418) 0:02:41.805 *******
ok: [managed_node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cni-dhcp.service": {
"name": "cni-dhcp.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnsmasq.service": {
"name": "dnsmasq.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "unknown"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "unknown"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "unknown"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:114
Saturday 10 August 2024 12:49:19 -0400 (0:00:01.667) 0:02:43.472 *******
skipping: [managed_node1] => {
"changed": false,
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:143
Saturday 10 August 2024 12:49:19 -0400 (0:00:00.035) 0:02:43.508 *******
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:149
Saturday 10 August 2024 12:49:19 -0400 (0:00:00.033) 0:02:43.541 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:158
Saturday 10 August 2024 12:49:19 -0400 (0:00:00.033) 0:02:43.575 *******
skipping: [managed_node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [Ensure no resources] *****************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:176
Saturday 10 August 2024 12:49:19 -0400 (0:00:00.034) 0:02:43.609 *******
fatal: [managed_node1]: FAILED! => {
"assertion": "__podman_test_debug_volumes.stdout == \"\"",
"changed": false,
"evaluated_to": false
}
MSG:
Assertion failed
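The failing assertion checks that the volume listing captured earlier (presumably registered from the "For testing and debugging - volumes" step) is empty; it fails because the three leftover volumes wp-pv-claim, envoy-proxy-config, and envoy-certificates were never removed. A sketch of the same check in shell form:
# the test expects no volumes to remain
test -z "$(podman volume ls -n)" && echo "no resources left" || echo "volumes still present"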
TASK [Debug] *******************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:187
Saturday 10 August 2024 12:49:19 -0400 (0:00:00.042) 0:02:43.652 *******
ok: [managed_node1] => {
"changed": false,
"cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n",
"delta": "0:00:00.379570",
"end": "2024-08-10 12:49:20.450649",
"rc": 0,
"start": "2024-08-10 12:49:20.071079"
}
STDERR:
+ set -o pipefail
+ systemctl list-units --plain -l --all
+ grep quadlet
+ :
+ systemctl list-unit-files --all
+ grep quadlet
+ :
+ systemctl list-units --plain --failed -l --all
+ grep quadlet
+ :
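Each pipeline in the debug script ends in "|| :" so that a grep with no matches (exit status 1, which would otherwise abort under set -o pipefail) does not fail the whole command; that is why rc is 0 even though no quadlet units were found. For example:
systemctl list-units --plain -l --all | grep quadlet || :   # ':' is a no-op, keeping the exit status at 0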
TASK [Get journald] ************************************************************
task path: /tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:197
Saturday 10 August 2024 12:49:20 -0400 (0:00:00.748) 0:02:44.400 *******
fatal: [managed_node1]: FAILED! => {
"changed": false,
"cmd": [
"journalctl",
"-ex"
],
"delta": "0:00:00.120911",
"end": "2024-08-10 12:49:20.938768",
"failed_when_result": true,
"rc": 0,
"start": "2024-08-10 12:49:20.817857"
}
STDOUT:
-- Logs begin at Sat 2024-08-10 12:42:15 EDT, end at Sat 2024-08-10 12:49:20 EDT. --
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[13933]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jnoqsnvuyjxajphoalpnmboofaotwzpd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308213.083026-23655-150113337306224/AnsiballZ_command.py'
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[13933]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[13936]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-13938.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[13933]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[14116]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kzndzojzviktbtyqpyajliczdefsmfoz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308213.5455413-23664-11152595684436/AnsiballZ_command.py'
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[14116]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-14121.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:33 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[14116]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:34 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[14249]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pmvnffxuygsetqbxpdmprsmjrvnhziqg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308213.9845612-23673-206621214442884/AnsiballZ_service_facts.py'
Aug 10 12:43:34 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[14249]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:34 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[14252]: ansible-service_facts Invoked
Aug 10 12:43:35 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[14249]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:36 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[14463]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:43:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[14587]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:43:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[14712]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[14836]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[14960]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:43:39 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15085]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bxtzcvaxkcjwypswihrodadfdvqnlqvq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308219.0742118-23771-114311856016967/AnsiballZ_systemd.py'
Aug 10 12:43:39 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15085]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:39 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[15088]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:43:39 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:43:39 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped quadlet-basic-unused-volume-volume.service.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:43:39 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15085]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:39 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[15225]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:43:40 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[15473]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15596]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lklxciwiejbqwloudnsgxexoxfrqlyjy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308220.9678679-23815-268963079414371/AnsiballZ_systemd.py'
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15596]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[15599]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15596]: pam_unix(sudo:session): session closed for user user_quadlet_basic
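
[Editor's note] The cycle just logged (stop the user-scoped quadlet-basic-unused-volume-volume.service, remove the .volume file from ~/.config/containers/systemd, then daemon-reload the user's systemd instance) is the basic quadlet removal pattern; the same sequence repeats below for the mysql volume and the two networks. A hedged sketch of equivalent tasks, assuming become_user plus the XDG_RUNTIME_DIR value seen in the log; this is not the role's own task file.

    # Illustrative quadlet cleanup matching the journal entries above.
    - name: Stop the generated volume unit in the user's systemd instance
      systemd:
        name: quadlet-basic-unused-volume-volume.service
        scope: user
        state: stopped
        enabled: false
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111

    - name: Remove the quadlet source file
      file:
        path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume
        state: absent

    - name: Reload the user's systemd so the generated unit disappears
      systemd:
        daemon_reload: true
        scope: user
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
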
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15733]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oyrhrkvybffichyhbchpqndvgzmlcarw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308221.518504-23824-152225894760651/AnsiballZ_command.py'
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15733]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-15738.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:41 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15733]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15866]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sxpfsocimgswvvrfcpgnojbsrzqgptou ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308222.0578556-23837-136291591930547/AnsiballZ_command.py'
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15866]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[15869]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-15871.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15866]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15999]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nppwgzfobxihdglostpdikqslykefcwt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308222.7238343-23854-244636546566587/AnsiballZ_command.py'
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15999]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:42 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[16002]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-16004.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[15999]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16133]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qaksuwaxgmuvekwpvzmzyxwddyzihizr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308223.1618078-23863-234874855546045/AnsiballZ_command.py'
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16133]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[16136]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-16138.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16133]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16266]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jogyyednlccescrrwdqnsqooqiiwcqlv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308223.5990522-23872-271958063126886/AnsiballZ_command.py'
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16266]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[16269]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-16271.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:43 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16266]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16399]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tnjxnpkpswwloiqqakzesdfpmlcsfqrn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308224.0353823-23881-101626613021610/AnsiballZ_command.py'
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16399]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[16402]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-16404.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16399]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16582]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-levndgjpbfymyaqydbincsoyyjummrma ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308224.5067992-23890-235911046948700/AnsiballZ_command.py'
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16582]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-16587.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:44 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16582]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:45 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16715]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-igjoonfmttcqrafljwuwdiwljeupskcb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308224.9423523-23899-119343147197176/AnsiballZ_service_facts.py'
Aug 10 12:43:45 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16715]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[16718]: ansible-service_facts Invoked
Aug 10 12:43:46 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[16715]: pam_unix(sudo:session): session closed for user user_quadlet_basic
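
[Editor's note] After each removal the log shows the same inspection pass as the rootless user: podman image prune --all -f, podman images -n, podman volume ls -n, podman ps --noheading, podman network ls -n -q, followed by a service_facts gather. A hedged equivalent, collapsed into a loop for brevity (the loop form is an editorial choice, not how the role runs these commands):

    # Sketch of the post-cleanup inspection pass logged above.
    - name: Prune unused images and list what remains
      command: "{{ item }}"
      loop:
        - podman image prune --all -f
        - podman images -n
        - podman volume ls -n
        - podman ps --noheading
        - podman network ls -n -q
      changed_when: false
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111

    - name: Gather service facts to confirm the generated unit is gone
      service_facts:
      become: true
      become_user: user_quadlet_basic
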
Aug 10 12:43:47 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[16929]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:43:47 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[17053]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:43:48 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[17178]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:48 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[17302]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[17426]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:43:49 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[17551]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cvhzesrcyvcsonrmkxsuqbrqmxtksqjt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308229.6295996-23997-223346227041871/AnsiballZ_systemd.py'
Aug 10 12:43:49 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[17551]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:50 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[17554]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:43:50 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:43:50 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped quadlet-basic-mysql-volume.service.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:43:50 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[17551]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:50 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[17691]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:43:50 ip-10-31-12-150.us-east-1.aws.redhat.com auditd[529]: Audit daemon rotating log files
Aug 10 12:43:51 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[17939]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:43:51 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18062]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zqczbilfvdjcufkpaycxlehurnaxoydw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308231.5190344-24041-70610728289615/AnsiballZ_systemd.py'
Aug 10 12:43:51 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18062]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:51 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[18065]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:43:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:43:51 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18062]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18200]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dzjtagqvmxhbaztogausbvyhdmwhkgym ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308232.0640543-24050-239637300846761/AnsiballZ_command.py'
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18200]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-18205.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18200]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18333]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qqpgeorenievoynhcodfqigjdoruebhh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308232.6210217-24063-280861178833927/AnsiballZ_command.py'
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18333]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[18336]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-18338.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:52 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18333]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:53 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18466]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bjyojjjvdgfjouvoaldepuopmwaxvgxf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308233.2860017-24080-87720165871104/AnsiballZ_command.py'
Aug 10 12:43:53 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18466]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:53 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[18469]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:53 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-18471.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:53 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18466]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:53 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18599]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ibubeerglddfaephygasysbybgmskouj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308233.7317252-24089-146173828954122/AnsiballZ_command.py'
Aug 10 12:43:53 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18599]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[18602]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-18604.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18599]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18732]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-saafydpldfeqytynkdaeoqptlpgmoybx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308234.168725-24098-34550627955245/AnsiballZ_command.py'
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18732]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[18735]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-18737.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18732]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18866]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oxeehrndplaiswkdbphwdpkkctqymwmi ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308234.6065106-24107-82442720480984/AnsiballZ_command.py'
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18866]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[18869]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-18871.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:54 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[18866]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:55 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[19050]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bdrkfkcpbiuthefmhbvbrjpuwwscfrgl ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308235.071577-24116-185748091904983/AnsiballZ_command.py'
Aug 10 12:43:55 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[19050]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:55 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-19055.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:43:55 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[19050]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:55 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[19183]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hnoljomfvrfrgsayxnzbvcyefjgvkdyn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308235.5106153-24125-210604499349946/AnsiballZ_service_facts.py'
Aug 10 12:43:55 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[19183]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:43:55 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[19186]: ansible-service_facts Invoked
Aug 10 12:43:57 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[19183]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:43:57 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[19397]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:43:58 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[19521]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:43:58 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[19646]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:43:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[19770]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:00 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[19894]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:00 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20019]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wsjiigojkndubzkgvubujdiiqhorrteb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308240.2124596-24223-267758520514728/AnsiballZ_systemd.py'
Aug 10 12:44:00 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20019]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:00 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[20022]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:44:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:44:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped quadlet-basic-unused-network-network.service.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:00 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20019]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:01 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[20159]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[20407]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20530]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xcgqknewmxjlhbxbeallgzxnfuhmqmjm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308242.1432724-24267-121285088406940/AnsiballZ_systemd.py'
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20530]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[20533]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20530]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20667]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gotobwknsxvwpaveycjtacsatztjhyts ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308242.6938133-24276-19914896142524/AnsiballZ_command.py'
Aug 10 12:44:02 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20667]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:03 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-20672.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:03 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20667]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:03 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20851]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-svarjpqfqemgothfhfzulagjmyizahze ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308243.261722-24289-5427076039198/AnsiballZ_command.py'
Aug 10 12:44:03 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20851]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:03 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[20854]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:03 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-20856.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:03 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20851]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20984]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zrbctmzhhfbptkxeonxpyokhfdsuedcj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308243.9321833-24306-110583504836717/AnsiballZ_command.py'
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20984]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[20987]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-20989.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[20984]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21117]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zavrcziqvddsgmrdivwzfdldrbsohnck ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308244.3833263-24315-177323097731150/AnsiballZ_command.py'
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21117]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[21120]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-21122.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21117]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21250]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ylelberopnusqtellejdbgyrykyqzhtw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308244.8325934-24324-109202165102740/AnsiballZ_command.py'
Aug 10 12:44:04 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21250]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[21253]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-21255.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21250]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21383]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vvgjnkwnellvdpilvbgsckouiowbxlwm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308245.2774293-24333-231379791351632/AnsiballZ_command.py'
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21383]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[21386]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-21388.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21383]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21541]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xpmshbkclizexonjmmyjoppkbhixkbpd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308245.7359586-24342-18741746340762/AnsiballZ_command.py'
Aug 10 12:44:05 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21541]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:06 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-21546.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:06 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21541]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:06 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21674]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yiaeqzdaaxtyivyqqicvyykqaqfvtezf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308246.1813927-24351-270902670874747/AnsiballZ_service_facts.py'
Aug 10 12:44:06 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21674]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[21677]: ansible-service_facts Invoked
Aug 10 12:44:07 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[21674]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:08 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[21888]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:44:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[22012]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[22137]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[22261]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:10 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[22385]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:11 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[22510]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kwnhhryghhoutnuuxwigmrmbrsntcdpf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308250.8994346-24449-191245465991737/AnsiballZ_systemd.py'
Aug 10 12:44:11 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[22510]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[22513]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:44:11 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:44:11 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped quadlet-basic-network.service.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:11 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[22510]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[22650]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:12 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[22898]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:44:12 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23021]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xzpuvtpfanldgbtigklughqkqhikfvtb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308252.792077-24493-224362684233352/AnsiballZ_systemd.py'
Aug 10 12:44:12 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23021]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[23024]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:44:13 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reloading.
Aug 10 12:44:13 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23021]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:13 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23158]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vkcwdlntfqpbpshnyyhjhececdlbautt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308253.3315303-24502-251294846596198/AnsiballZ_command.py'
Aug 10 12:44:13 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23158]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:13 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-23163.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:13 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23158]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23317]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-alhjbrprvbcrgsmqnccohjyvjzodzscy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308253.890159-24515-84058417537927/AnsiballZ_command.py'
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23317]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[23320]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-23322.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23317]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23450]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-akrdwlywjnuirwffjfocjyzbhbhnltfo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308254.558641-24532-59381013600515/AnsiballZ_command.py'
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23450]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[23453]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-23455.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:14 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23450]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23583]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hirsgmmzzxynchcimimpcishpxtrwccp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308254.9973698-24541-236714414301960/AnsiballZ_command.py'
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23583]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[23586]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-23588.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23583]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23716]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cgdmrwxunawisppepffwnihambklaktc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308255.4420323-24550-207130264516575/AnsiballZ_command.py'
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23716]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[23719]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-23721.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:15 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23716]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23849]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kknjwaadnponvhgwztmcdhhpnxlbzxlh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308255.8789263-24559-6502607309940/AnsiballZ_command.py'
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23849]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[23852]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-23854.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23849]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23982]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aaouowbuhxoyeolgznstepwrncpuhrkt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308256.3204308-24568-252033059434896/AnsiballZ_command.py'
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23982]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-23987.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[23982]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24115]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hvokknldejfnuwuistmblvlsqaorvweq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308256.7627757-24577-98774698316057/AnsiballZ_service_facts.py'
Aug 10 12:44:16 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24115]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[24118]: ansible-service_facts Invoked
Aug 10 12:44:18 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24115]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:18 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[24329]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24454]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aognclwmmlrtwanqtaopcbgsaocffskm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308259.0442204-24603-58943877515507/AnsiballZ_podman_container_info.py'
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24454]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[24457]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-24459.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24454]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24587]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nnxfbcnvepmwgenwsvfzmmqinguoxuky ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308259.5528545-24612-200807019741553/AnsiballZ_command.py'
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24587]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[24590]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-24592.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:19 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24587]: pam_unix(sudo:session): session closed for user user_quadlet_basic
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24720]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wxacwifyimdcztpobyqjnphwjdkadfwc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1723308259.9912398-24621-104129571645717/AnsiballZ_command.py'
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24720]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0)
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[24723]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started podman-24725.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com sudo[24720]: pam_unix(sudo:session): session closed for user user_quadlet_basic
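
[Editor's note] Before dropping linger, the entries above verify that nothing rootless is left behind: podman_container_info for containers, podman network ls -q for networks, and podman secret ls -n -q for secrets. A hedged sketch of those final checks, again assuming become_user and the runtime directory from the log:

    # Sketch of the final rootless verification seen above.
    - name: List any remaining containers for the test user
      containers.podman.podman_container_info:
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111

    - name: List remaining networks and secrets
      command: "{{ item }}"
      loop:
        - podman network ls -q
        - podman secret ls -n -q
      changed_when: false
      become: true
      become_user: user_quadlet_basic
      environment:
        XDG_RUNTIME_DIR: /run/user/1111
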
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[24853]: ansible-command Invoked with removes=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl disable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopping User Manager for UID 1111...
-- Subject: Unit user@1111.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user@1111.service has begun shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopping D-Bus User Message Bus...
-- Subject: Unit UNIT has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has begun shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped target Default.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopping podman-pause-fd35b2b9.scope.
-- Subject: Unit UNIT has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has begun shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped D-Bus User Message Bus.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped target Basic System.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped target Timers.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped Mark boot as successful after the user session has run 2 minutes.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped target Sockets.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped target Paths.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Closed D-Bus User Message Bus Socket.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Stopped podman-pause-fd35b2b9.scope.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Removed slice user.slice.
-- Subject: Unit UNIT has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reached target Shutdown.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Started Exit the Session.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[2505]: Reached target Exit the Session.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: user@1111.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit user@1111.service has successfully entered the 'dead' state.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped User Manager for UID 1111.
-- Subject: Unit user@1111.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user@1111.service has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopping User runtime directory /run/user/1111...
-- Subject: Unit user-runtime-dir@1111.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user-runtime-dir@1111.service has begun shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: run-user-1111.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-user-1111.mount has successfully entered the 'dead' state.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: user-runtime-dir@1111.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit user-runtime-dir@1111.service has successfully entered the 'dead' state.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped User runtime directory /run/user/1111.
-- Subject: Unit user-runtime-dir@1111.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user-runtime-dir@1111.service has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Removed slice User Slice of UID 1111.
-- Subject: Unit user-1111.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit user-1111.slice has finished shutting down.
Aug 10 12:44:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: user-1111.slice: Consumed 15.822s CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit user-1111.slice completed and consumed the indicated resources.
Aug 10 12:44:21 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[24978]: ansible-command Invoked with _raw_params=loginctl show-user -P State user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:21 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[25102]: ansible-systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None scope=None
Aug 10 12:44:21 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopping Login Service...
-- Subject: Unit systemd-logind.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit systemd-logind.service has begun shutting down.
Aug 10 12:44:21 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: systemd-logind.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit systemd-logind.service has successfully entered the 'dead' state.
Aug 10 12:44:21 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped Login Service.
-- Subject: Unit systemd-logind.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit systemd-logind.service has finished shutting down.
Aug 10 12:44:22 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[25230]: ansible-command Invoked with _raw_params=loginctl show-user -P State user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:22 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[25354]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:25 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[25602]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:44:25 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[25731]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Aug 10 12:44:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[25855]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:44:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[25979]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-compat457171338-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-compat457171338-merged.mount has successfully entered the 'dead' state.
Aug 10 12:44:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck2624436704-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck2624436704-merged.mount has successfully entered the 'dead' state.
Aug 10 12:44:29 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:30 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[26406]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:44:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[26530]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:32 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[26655]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:44:32 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[26778]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic.network follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:44:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[26877]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308272.7079954-24953-62286071663087/source dest=/etc/containers/systemd/quadlet-basic.network owner=root group=0 mode=0644 _original_basename=quadlet-basic.network follow=False checksum=19c9b17be2af9b9deca5c3bd327f048966750682 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:44:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27002]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:44:33 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:44:33 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:33 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:34 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27155]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:44:34 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-basic-network.service...
-- Subject: Unit quadlet-basic-network.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-network.service has begun starting up.
Aug 10 12:44:34 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-basic-network[27162]: quadlet-basic-name
Aug 10 12:44:34 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:34 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-basic-network.service.
-- Subject: Unit quadlet-basic-network.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-network.service has finished starting up.
--
-- The start-up result is done.
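The contents of /etc/containers/systemd/quadlet-basic.network are not captured in the journal (the copy task logs content=NOT_LOGGING_PARAMETER), but given the service name quadlet-basic-network.service and the network name quadlet-basic-name echoed at start-up, a minimal quadlet network unit consistent with this log would look roughly like:

    [Network]
    NetworkName=quadlet-basic-name

Any subnet, gateway or DNS options the test actually sets are not visible here; only the custom network name can be inferred from the journal.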
Aug 10 12:44:35 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27315]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:44:35 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27439]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27564]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:44:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27687]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:44:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27786]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308277.4354873-25084-151832524133972/source dest=/etc/containers/systemd/quadlet-basic-unused-network.network owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:44:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[27911]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:44:38 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:44:38 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:38 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:39 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28064]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:44:39 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-basic-unused-network-network.service...
-- Subject: Unit quadlet-basic-unused-network-network.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-unused-network-network.service has begun starting up.
Aug 10 12:44:39 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-basic-unused-network-network[28071]: systemd-quadlet-basic-unused-network
Aug 10 12:44:39 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:39 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-basic-unused-network-network.service.
-- Subject: Unit quadlet-basic-unused-network-network.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-unused-network-network.service has finished starting up.
--
-- The start-up result is done.
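Here the generated service prints systemd-quadlet-basic-unused-network rather than a custom name, which matches quadlet's default of prefixing the unit basename with "systemd-" when no NetworkName= is given; an illustrative unit for this case is simply:

    [Network]
    # No NetworkName= set, so the podman network is created as
    # systemd-quadlet-basic-unused-network (the name echoed above).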
Aug 10 12:44:40 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28250]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:44:40 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28374]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:41 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28499]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:44:42 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28622]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:44:42 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28721]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308282.0928502-25211-156796995190578/source dest=/etc/containers/systemd/quadlet-basic-mysql.volume owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=90a3571bfc7670328fe3f8fb625585613dbd9c4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28846]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[28999]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-basic-mysql-volume.service...
-- Subject: Unit quadlet-basic-mysql-volume.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-mysql-volume.service has begun starting up.
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-basic-mysql-volume[29006]: quadlet-basic-mysql-name
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-basic-mysql-volume.service.
-- Subject: Unit quadlet-basic-mysql-volume.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-mysql-volume.service has finished starting up.
--
-- The start-up result is done.
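As with the network above, the .volume file content is not logged; the volume name quadlet-basic-mysql-name printed by quadlet-basic-mysql-volume.service suggests a unit along these lines (an illustrative sketch, not the actual test file):

    [Volume]
    VolumeName=quadlet-basic-mysql-name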
Aug 10 12:44:44 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[29135]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:44:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[29259]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:46 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[29384]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:44:47 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[29507]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:44:47 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[29606]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308286.7509117-25338-244267414973948/source dest=/etc/containers/systemd/quadlet-basic-unused-volume.volume owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=fd0ae560360afa5541b866560b1e849d25e216ef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:44:47 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[29731]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:44:47 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:44:47 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:47 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:44:48 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[29884]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:44:48 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-basic-unused-volume-volume.service...
-- Subject: Unit quadlet-basic-unused-volume-volume.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-unused-volume-volume.service has begun starting up.
Aug 10 12:44:48 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-basic-unused-volume-volume[29891]: systemd-quadlet-basic-unused-volume
Aug 10 12:44:48 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:48 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-basic-unused-volume-volume.service.
-- Subject: Unit quadlet-basic-unused-volume-volume.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-unused-volume-volume.service has finished starting up.
--
-- The start-up result is done.
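The unused volume follows the default-naming pattern already seen for the unused network: the service echoes systemd-quadlet-basic-unused-volume, i.e. a .volume unit with no explicit VolumeName=, roughly:

    [Volume]
    # No VolumeName= set; the volume is created as
    # systemd-quadlet-basic-unused-volume.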
Aug 10 12:44:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[30019]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:44:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[30143]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:44:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:44:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[30488]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:44:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[30611]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:44:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[30710]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308299.3636813-25472-57311856677097/source dest=/etc/containers/systemd/quadlet-basic-mysql.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=0b6cac7929623f1059e78ef39b8b0a25169b28a6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:45:00 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[30835]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:45:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:45:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[30988]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-basic-mysql.service...
-- Subject: Unit quadlet-basic-mysql.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-mysql.service has begun starting up.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.2495] manager: (cni-podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.2507] manager: (veth47da3d05): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_UP): veth47da3d05: link is not ready
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(veth47da3d05) entered blocking state
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(veth47da3d05) entered disabled state
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: device veth47da3d05 entered promiscuous mode
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[31086]: Using default interface naming scheme 'rhel-8.0'.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[31086]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[31086]: Could not generate persistent MAC address for cni-podman2: No such file or directory
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[31087]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[31087]: Could not generate persistent MAC address for veth47da3d05: No such file or directory
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3016] device (cni-podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3022] device (cni-podman2): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3032] device (cni-podman2): Activation: starting connection 'cni-podman2' (6d68de02-d5f1-407c-b353-1272f96dae2a)
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3033] device (cni-podman2): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3037] device (cni-podman2): state change: prepare -> config (reason 'none', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3039] device (cni-podman2): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3041] device (cni-podman2): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.3' (uid=0 pid=668 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has begun starting up.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth47da3d05: link becomes ready
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(veth47da3d05) entered blocking state
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(veth47da3d05) entered forwarding state
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3250] device (veth47da3d05): carrier: link connected
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3253] device (cni-podman2): carrier: link connected
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3330] device (cni-podman2): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3332] device (cni-podman2): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308301.3338] device (cni-podman2): Activation: successful, device activated.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31180]: listening on cni-podman2(#3): 192.168.29.1
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: started, version 2.79 cachesize 150
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: using local addresses only for domain dns.podman
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: reading /etc/resolv.conf
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: using local addresses only for domain dns.podman
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: using nameserver 10.29.169.13#53
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: using nameserver 10.29.170.12#53
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: using nameserver 10.2.32.1#53
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: read /run/containers/cni/dnsname/quadlet-basic-name/addnhosts - 1 addresses
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: Adding Red Hat flag eBPF/cgroup.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-basic-mysql.service.
-- Subject: Unit quadlet-basic-mysql.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-mysql.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:45:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-basic-mysql[30995]: 58bdf5eeb7797001c569ef152c2f84809d68a88883496ca3b0d69d68d08b060a
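The quadlet-basic-mysql.container file itself is likewise not logged. A sketch consistent with the names visible in this journal (container quadlet-basic-mysql-name, network quadlet-basic.network, volume quadlet-basic-mysql.volume) could look like the following; the image reference and any environment or secret settings are assumptions, since none of them appear in the log:

    [Container]
    ContainerName=quadlet-basic-mysql-name
    # Hypothetical image reference; the real one is not shown in this journal.
    Image=docker.io/library/mysql:8.0
    Network=quadlet-basic.network
    Volume=quadlet-basic-mysql.volume:/var/lib/mysql
    # Database credentials (e.g. via Environment= or Secret=) are omitted here.

The later "podman exec quadlet-basic-mysql-name cat /tmp/test.json" task below confirms the container name; everything else in this sketch is illustrative.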
Aug 10 12:45:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[31377]: ansible-command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic-mysql.container warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[31501]: ansible-command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic.network warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:02 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[31184]: listening on cni-podman2(#3): fe80::6463:15ff:fead:2567%cni-podman2
Aug 10 12:45:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[31648]: ansible-command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic-mysql.volume warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:03 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[31776]: ansible-command Invoked with _raw_params=podman exec quadlet-basic-mysql-name cat /tmp/test.json warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32080]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32216]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None
Aug 10 12:45:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32361]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:45:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32494]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:08 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32619]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32743]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32874]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:11 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Aug 10 12:45:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[32997]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:12 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[33143]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:45:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[33267]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[33392]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:14 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[33516]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[33640]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[33763]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[33886]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:45:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34009]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:45:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34133]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:18 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34258]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:18 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34382]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:19 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34506]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:19 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34629]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:20 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34752]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:45:21 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34875]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:45:22 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[34999]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:22 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35124]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:23 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35248]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:24 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35372]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:24 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35495]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:25 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35618]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:45:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35741]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:45:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35865]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:27 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[35990]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:27 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36114]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:28 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36238]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:29 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36361]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:29 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36484]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:45:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36607]: ansible-getent Invoked with database=group key=1111 fail_key=False service=None split=None
Aug 10 12:45:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36731]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36856]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:32 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[36980]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[37104]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[37227]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:34 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[37350]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:45:35 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[37473]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:36 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[37596]: ansible-user Invoked with name=user_quadlet_basic uid=1111 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on ip-10-31-12-150.us-east-1.aws.redhat.com update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None
Aug 10 12:45:36 ip-10-31-12-150.us-east-1.aws.redhat.com userdel[37600]: delete user 'user_quadlet_basic'
Aug 10 12:45:36 ip-10-31-12-150.us-east-1.aws.redhat.com userdel[37600]: removed group 'user_quadlet_basic' owned by 'user_quadlet_basic'
Aug 10 12:45:36 ip-10-31-12-150.us-east-1.aws.redhat.com userdel[37600]: removed shadow group 'user_quadlet_basic' owned by 'user_quadlet_basic'
Aug 10 12:45:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[37853]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:39 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[37982]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Aug 10 12:45:39 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[38106]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:45:40 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[38230]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:44 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[38615]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:45:44 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[38739]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[38864]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:45:46 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:45:46 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:46 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:46 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopping quadlet-basic-mysql.service...
-- Subject: Unit quadlet-basic-mysql.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-mysql.service has begun shutting down.
Aug 10 12:45:47 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(veth47da3d05) entered disabled state
Aug 10 12:45:47 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: device veth47da3d05 left promiscuous mode
Aug 10 12:45:47 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(veth47da3d05) entered disabled state
Aug 10 12:45:47 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: run-netns-netns\x2d99f450ce\x2d9af5\x2d2bd7\x2dc81c\x2da6a90ae1d172.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d99f450ce\x2d9af5\x2d2bd7\x2dc81c\x2da6a90ae1d172.mount has successfully entered the 'dead' state.
Aug 10 12:45:47 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-58bdf5eeb7797001c569ef152c2f84809d68a88883496ca3b0d69d68d08b060a-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-58bdf5eeb7797001c569ef152c2f84809d68a88883496ca3b0d69d68d08b060a-userdata-shm.mount has successfully entered the 'dead' state.
Aug 10 12:45:48 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-5ecc5a83917cbb501be1e3f5dd482ed1723fb0c50e85b01270e0bd8f02e8d0aa-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-5ecc5a83917cbb501be1e3f5dd482ed1723fb0c50e85b01270e0bd8f02e8d0aa-merged.mount has successfully entered the 'dead' state.
Aug 10 12:45:48 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-basic-mysql[38900]: 58bdf5eeb7797001c569ef152c2f84809d68a88883496ca3b0d69d68d08b060a
Aug 10 12:45:48 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:45:48 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-basic-mysql.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-basic-mysql.service has successfully entered the 'dead' state.
Aug 10 12:45:48 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-basic-mysql.service.
-- Subject: Unit quadlet-basic-mysql.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-mysql.service has finished shutting down.
Aug 10 12:45:48 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[39170]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.container follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[39418]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:45:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[39541]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:45:49 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:45:49 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:49 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:50 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:45:50 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[39824]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
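The entries from 12:45:48 through 12:45:50 above are the removal path for the quadlet-basic-mysql container: stat the generated unit file, delete it, reload systemd so the generated service goes away, then prune unused images. A minimal playbook sketch that would produce an equivalent trace (play scope and task names are illustrative assumptions, not taken from the test):

- hosts: managed_node1
  become: true
  tasks:
    - name: Check whether the quadlet unit file still exists
      stat:
        path: /etc/containers/systemd/quadlet-basic-mysql.container
      register: quadlet_container_file

    - name: Remove the quadlet unit file
      file:
        path: /etc/containers/systemd/quadlet-basic-mysql.container
        state: absent

    - name: Reload systemd so the generated quadlet-basic-mysql.service disappears
      systemd:
        daemon_reload: true
        scope: system

    - name: Prune unused images, matching the logged "podman image prune --all -f"
      command: podman image prune --all -f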
Aug 10 12:45:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:45:51 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[39955]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:52 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[40085]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:45:52 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[40216]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:53 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[40346]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:45:53 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:45:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[40681]: ansible-service_facts Invoked
Aug 10 12:45:56 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[40892]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:45:56 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[41016]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:57 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[41141]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:45:57 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:45:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-basic-unused-volume-volume.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-basic-unused-volume-volume.service has successfully entered the 'dead' state.
Aug 10 12:45:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-basic-unused-volume-volume.service.
-- Subject: Unit quadlet-basic-unused-volume-volume.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-unused-volume-volume.service has finished shutting down.
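Before each generated unit's source file is removed, the role first stops and disables the unit itself; the systemd Invoked line at 12:45:57 and the 'dead'/'Stopped' journal entries above show that for quadlet-basic-unused-volume-volume.service. A task-level sketch of that step (task name assumed):

- hosts: managed_node1
  become: true
  tasks:
    - name: Stop and disable the generated volume unit before deleting its quadlet file
      systemd:
        name: quadlet-basic-unused-volume-volume.service
        scope: system
        state: stopped
        enabled: false
        force: true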
Aug 10 12:45:58 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[41298]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:45:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[41546]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:45:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[41669]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:45:59 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:45:59 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:45:59 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:00 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[41952]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:01 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[42083]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:01 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[42213]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[42343]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[42473]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:02 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:03 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:03 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[42809]: ansible-service_facts Invoked
Aug 10 12:46:05 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[43020]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:46:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[43144]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[43269]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:46:07 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:46:07 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:07 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:07 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-basic-mysql-volume.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-basic-mysql-volume.service has successfully entered the 'dead' state.
Aug 10 12:46:07 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-basic-mysql-volume.service.
-- Subject: Unit quadlet-basic-mysql-volume.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-mysql-volume.service has finished shutting down.
Aug 10 12:46:08 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[43426]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[43674]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:46:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[43797]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:46:09 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:46:09 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:09 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:10 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[44080]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[44210]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:11 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[44340]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:12 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[44470]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:12 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[44600]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:12 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[44936]: ansible-service_facts Invoked
Aug 10 12:46:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[45147]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:46:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[45271]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[45396]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:46:17 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:46:17 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:17 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:17 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-basic-unused-network-network.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-basic-unused-network-network.service has successfully entered the 'dead' state.
Aug 10 12:46:17 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-basic-unused-network-network.service.
-- Subject: Unit quadlet-basic-unused-network-network.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-unused-network-network.service has finished shutting down.
Aug 10 12:46:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[45553]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-network.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:18 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[45801]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:46:19 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[45924]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:46:19 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:46:19 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:19 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:19 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:20 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[46283]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:20 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:20 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[46413]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:21 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[46544]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:21 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[46674]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:21 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:22 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[46804]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:22 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:23 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[47114]: ansible-service_facts Invoked
Aug 10 12:46:25 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[47325]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:46:25 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[47449]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[47574]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:46:26 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:46:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-basic-network.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-basic-network.service has successfully entered the 'dead' state.
Aug 10 12:46:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-basic-network.service.
-- Subject: Unit quadlet-basic-network.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-basic-network.service has finished shutting down.
Aug 10 12:46:27 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[47731]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:28 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[47979]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:46:28 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[48102]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:46:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:46:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:46:29 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:29 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: <info>  [1723308389.5119] device (cni-podman2): state change: activated -> unmanaged (reason 'unmanaged', sys-iface-state: 'removed')
Aug 10 12:46:29 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.3' (uid=0 pid=668 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Aug 10 12:46:29 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has begun starting up.
Aug 10 12:46:29 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Aug 10 12:46:29 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:46:30 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[48446]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:30 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:30 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[48576]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[48706]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[48836]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[48966]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:31 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:46:32 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[49252]: ansible-service_facts Invoked
Aug 10 12:46:36 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[49501]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=* fact_path=/etc/ansible/facts.d
Aug 10 12:46:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[49649]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[49772]: ansible-dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Aug 10 12:46:39 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Aug 10 12:46:42 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[49899]: ansible-dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Aug 10 12:46:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50026]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:46:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50149]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:46:46 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50272]: ansible-systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50399]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
# Ansible managed
#
# system_role:certificate
provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
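The certificate_request invocation logged at 12:46:47 carries the parameters handed to certmonger: name=quadlet_demo, dns=['localhost'], ca=self-sign, digitalSignature/keyEncipherment key usage and serverAuth/clientAuth extended key usage, with auto-renewal enabled. Expressed through the certificate role's documented variables, the request looks roughly like this (play wording is an assumption):

- hosts: managed_node1
  become: true
  vars:
    certificate_requests:
      - name: quadlet_demo   # written as /etc/pki/tls/certs/quadlet_demo.crt and /etc/pki/tls/private/quadlet_demo.key
        dns: ['localhost']
        ca: self-sign        # certmonger's local self-signing CA, as in the logged ca=self-sign
  roles:
    - fedora.linux_system_roles.certificate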
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[50415]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
Aug 10 12:46:47 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:47 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:48 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50537]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Aug 10 12:46:48 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50660]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Aug 10 12:46:48 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50783]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Aug 10 12:46:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[50906]: ansible-command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:49 ip-10-31-12-150.us-east-1.aws.redhat.com certmonger[671]: 2024-08-10 12:46:49 [671] Wrote to /var/lib/certmonger/requests/20240810164647
Aug 10 12:46:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[51030]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:46:50 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[51153]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:46:50 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[51276]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
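After reading the issued certificate and key back (the slurp calls at 12:46:48), the test tears the certificate down: getcert stop-tracking on the cert, then removal of both files. A sketch of equivalent cleanup tasks (task names assumed):

- hosts: managed_node1
  become: true
  tasks:
    - name: Stop certmonger from tracking and renewing the test certificate
      command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt

    - name: Remove the issued certificate and key
      file:
        path: "{{ item }}"
        state: absent
      loop:
        - /etc/pki/tls/certs/quadlet_demo.crt
        - /etc/pki/tls/private/quadlet_demo.key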
Aug 10 12:46:50 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[51399]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:53 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[51647]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:46:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[51776]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Aug 10 12:46:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[51900]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:46:55 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52024]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:56 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52149]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:56 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52272]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:46:57 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52395]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Aug 10 12:47:00 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52522]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None
Aug 10 12:47:01 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52649]: ansible-systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None
Aug 10 12:47:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52776]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Aug 10 12:47:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[52899]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
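The two firewall_lib invocations at 12:47:02 open 8000/tcp and 9000/tcp in both the permanent and runtime firewalld configuration. With the firewall role's documented 'firewall' variable this maps to roughly the following (play wording assumed):

- hosts: managed_node1
  become: true
  vars:
    firewall:
      - port: 8000/tcp
        state: enabled
        permanent: true
        runtime: true
      - port: 9000/tcp
        state: enabled
        permanent: true
        runtime: true
  roles:
    - fedora.linux_system_roles.firewall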
Aug 10 12:47:04 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:04 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:05 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:05 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[53473]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:47:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[53597]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:47:08 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[53722]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[53845]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:47:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[53944]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308428.7586992-28296-197568005891150/source dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54069]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54222]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-demo-network.service...
-- Subject: Unit quadlet-demo-network.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-network.service has begun starting up.
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo-network[54229]: systemd-quadlet-demo
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:10 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-demo-network.service.
-- Subject: Unit quadlet-demo-network.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-network.service has finished starting up.
--
-- The start-up result is done.
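The sequence from 12:47:08 to 12:47:10 is the deployment half of the test: make sure /etc/containers/systemd exists, copy quadlet-demo.network into it, reload systemd so the quadlet generator emits quadlet-demo-network.service, then start that service (the unit prints the network it created, systemd-quadlet-demo). Done by hand rather than through the role, the same steps could look like this; the .network payload below is a placeholder, since the real file content is not shown in the log:

- hosts: managed_node1
  become: true
  tasks:
    - name: Ensure the quadlet drop-in directory exists
      file:
        path: /etc/containers/systemd
        state: directory
        owner: root
        group: "0"
        mode: "0755"

    - name: Install the network quadlet (placeholder content, not from the log)
      copy:
        dest: /etc/containers/systemd/quadlet-demo.network
        owner: root
        group: "0"
        mode: "0644"
        content: |
          [Network]
          NetworkName=systemd-quadlet-demo

    - name: Reload systemd so the generator creates quadlet-demo-network.service
      systemd:
        daemon_reload: true
        scope: system

    - name: Start the generated network unit
      systemd:
        name: quadlet-demo-network.service
        scope: system
        state: started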
Aug 10 12:47:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54382]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:47:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54506]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:47:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54631]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54754]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:47:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54853]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308433.3081214-28423-261850214882015/source dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:47:14 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[54978]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:47:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:47:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[55131]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:47:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-demo-mysql-volume.service...
-- Subject: Unit quadlet-demo-mysql-volume.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql-volume.service has begun starting up.
Aug 10 12:47:15 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo-mysql-volume[55138]: systemd-quadlet-demo-mysql
Aug 10 12:47:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-demo-mysql-volume.service.
-- Subject: Unit quadlet-demo-mysql-volume.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql-volume.service has finished starting up.
--
-- The start-up result is done.
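quadlet-demo-mysql-volume.service prints the volume it created, systemd-quadlet-demo-mysql, just before systemd marks it started. A small verification that could follow this point (purely illustrative, not part of the test):

- hosts: managed_node1
  become: true
  tasks:
    - name: List podman volume names
      command: podman volume ls -q
      register: volume_names
      changed_when: false

    - name: Assert the quadlet-created volume exists
      assert:
        that:
          - "'systemd-quadlet-demo-mysql' in volume_names.stdout_lines"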
Aug 10 12:47:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[55266]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:47:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[55390]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:47:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[55515]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:25 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:25 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:47:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[55859]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[55982]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:47:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[56081]: ansible-copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1723308446.193933-28568-131028171043796/source _original_basename=tmp_99171qg follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:47:27 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[56206]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:47:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:47:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:27 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[56359]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
Aug 10 12:47:27 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-demo-mysql.service...
-- Subject: Unit quadlet-demo-mysql.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql.service has begun starting up.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_UP): vethbde53e09: link is not ready
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(vethbde53e09) entered blocking state
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(vethbde53e09) entered disabled state
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: device vethbde53e09 entered promiscuous mode
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(vethbde53e09) entered blocking state
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(vethbde53e09) entered forwarding state
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethbde53e09: link becomes ready
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0502] device (cni-podman2): carrier: link connected
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0507] manager: (cni-podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/5)
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0522] device (vethbde53e09): carrier: link connected
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0525] manager: (vethbde53e09): new Veth device (/org/freedesktop/NetworkManager/Devices/6)
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[56439]: Using default interface naming scheme 'rhel-8.0'.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[56439]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[56440]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[56440]: Could not generate persistent MAC address for vethbde53e09: No such file or directory
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0678] device (cni-podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0685] device (cni-podman2): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0693] device (cni-podman2): Activation: starting connection 'cni-podman2' (172f2f71-5af8-49dc-ad21-421eff2df762)
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0694] device (cni-podman2): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0697] device (cni-podman2): state change: prepare -> config (reason 'none', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0700] device (cni-podman2): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.0702] device (cni-podman2): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.3' (uid=0 pid=668 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has begun starting up.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.1224] device (cni-podman2): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.1227] device (cni-podman2): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308448.1232] device (cni-podman2): Activation: successful, device activated.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56495]: listening on cni-podman2(#5): 192.168.30.1
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: started, version 2.79 cachesize 150
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: using local addresses only for domain dns.podman
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: reading /etc/resolv.conf
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: using local addresses only for domain dns.podman
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: using nameserver 10.29.169.13#53
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: using nameserver 10.29.170.12#53
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: using nameserver 10.2.32.1#53
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 1 addresses
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started /usr/bin/podman healthcheck run 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.
-- Subject: Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.timer has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.timer has finished starting up.
--
-- The start-up result is done.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-demo-mysql.service.
-- Subject: Unit quadlet-demo-mysql.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started /usr/bin/podman healthcheck run 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.
-- Subject: Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo-mysql[56366]: 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6
Aug 10 12:47:28 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has successfully entered the 'dead' state.
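The healthcheck timer and service entries above are the transient units podman sets up for a container that defines a healthcheck: the .timer fires periodically, its matching .service runs /usr/bin/podman healthcheck run <container-id>, and the service then returns to the dead state until the next trigger. The same check can be run on demand; a sketch, using the container ID from this log:

- name: Run the MySQL container healthcheck once   # sketch; normally the transient timer does this
  command: podman healthcheck run 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6
  changed_when: false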
Aug 10 12:47:29 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[56748]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:47:29 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[56875]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:47:29 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: listening on cni-podman2(#5): fe80::8088:3eff:fe75:a703%cni-podman2
Aug 10 12:47:30 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57024]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57147]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:47:31 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57271]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308451.0169137-28693-186169960018362/source dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:47:32 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57396]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:47:32 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:47:32 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:32 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57549]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:47:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57690]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:47:35 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57835]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:35 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[57958]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:47:36 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[58057]: ansible-copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1723308455.3186655-28815-246988955991842/source _original_basename=tmpebj2vsmz follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:47:36 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[58189]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:47:36 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:47:36 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:36 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[58342]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:47:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[58466]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:47:38 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Aug 10 12:47:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[58609]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Aug 10 12:47:39 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[58737]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:40 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[58860]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:58 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[59444]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:47:58 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[59567]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Aug 10 12:47:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started /usr/bin/podman healthcheck run 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.
-- Subject: Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:47:59 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has successfully entered the 'dead' state.
Aug 10 12:47:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[59678]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1723308478.5610893-28968-259954300491430/source dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None
Aug 10 12:47:59 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[59817]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:47:59 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:47:59 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:47:59 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[59970]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None
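The same place-file / daemon-reload / start pattern repeats above for envoy-proxy-configmap.yml, quadlet-demo.yml and quadlet-demo.kube; only the .kube file produces a service (quadlet-demo.service), while the two YAML files are payload it references. A loop-style sketch of the file placement (filenames are the ones from the log; the loop itself is an assumption, the role may handle each file individually):

- name: Install the quadlet demo sources
  copy:
    src: "{{ item }}"
    dest: "/etc/containers/systemd/{{ item }}"
    owner: root
    group: "0"
    mode: "0644"
  loop:
    - envoy-proxy-configmap.yml
    - quadlet-demo.yml
    - quadlet-demo.kube

- name: Regenerate units from the quadlet files
  systemd:
    daemon_reload: true

- name: Start the generated service
  systemd:
    name: quadlet-demo.service
    scope: system
    state: started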
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting quadlet-demo.service...
-- Subject: Unit quadlet-demo.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has begun starting up.
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: Pods stopped:
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: Pods removed:
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: Secrets removed:
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: Volumes removed:
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-63bfc98854bdd51e7c8bc2680c0c2d789891cf939bb2ab02e8e4618d8dd4158d-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-63bfc98854bdd51e7c8bc2680c0c2d789891cf939bb2ab02e8e4618d8dd4158d-merged.mount has successfully entered the 'dead' state.
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Created slice machine.slice.
-- Subject: Unit machine.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine.slice has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Created slice cgroup machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice.
-- Subject: Unit machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:00 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started libcontainer container 50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06.
-- Subject: Unit libpod-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06.scope has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_UP): veth0bf0b8d3: link is not ready
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered blocking state
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered disabled state
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: device veth0bf0b8d3 entered promiscuous mode
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered blocking state
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered forwarding state
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[60093]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd-udevd[60093]: Could not generate persistent MAC address for veth0bf0b8d3: No such file or directory
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308481.0750] manager: (veth0bf0b8d3): new Veth device (/org/freedesktop/NetworkManager/Devices/7)
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered disabled state
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth0bf0b8d3: link becomes ready
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered blocking state
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered forwarding state
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308481.0880] device (veth0bf0b8d3): carrier: link connected
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 2 addresses
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started libcontainer container 98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.
-- Subject: Unit libpod-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.scope has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started libcontainer container 2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f.
-- Subject: Unit libpod-2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f.scope has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started libcontainer container 37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915.
-- Subject: Unit libpod-37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915.scope has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: Volumes:
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: wp-pv-claim
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: Pod:
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: 1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: Containers:
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: 2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[59977]: 37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915
Aug 10 12:48:01 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started quadlet-demo.service.
-- Subject: Unit quadlet-demo.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[60455]: ansible-command Invoked with _raw_params=ls -alrtF /etc/containers/systemd warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[60640]: ansible-command Invoked with _raw_params=podman ps -a warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:03 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[60827]: ansible-command Invoked with _raw_params=podman volume ls warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:03 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[60957]: ansible-command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:04 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[61087]: ansible-command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:04 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[61213]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False sha256sum= checksum= timeout=10 follow=False unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None backup=None headers=None tmp_dest=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None src=None content=NOT_LOGGING_PARAMETER remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:48:05 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[61338]: ansible-command Invoked with _raw_params=cat /run/out warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[61462]: ansible-command Invoked with _raw_params=podman ps -a warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[61594]: ansible-command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[61724]: ansible-command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[61850]: ansible-command Invoked with _raw_params=ls -alrtF /etc/systemd/system warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
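The block of command and get_url invocations above is the test's verification pass: list /etc/containers/systemd, inspect podman ps / volume ls / pod ps, grep the systemd unit list for quadlet, and fetch https://localhost:8000 with certificate validation disabled to confirm the web container answers, writing the response to /run/out. A condensed sketch of the same checks (paths and options are the ones shown in the log; failure handling is omitted):

- name: Confirm the quadlet units are loaded
  shell: set -euo pipefail; systemctl list-units | grep quadlet

- name: Confirm the demo app answers over HTTPS
  get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false

- name: Show the fetched page
  command: cat /run/out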
Aug 10 12:48:09 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62099]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:10 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62228]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:48:10 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62352]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:12 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62477]: ansible-dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True lock_timeout=30 conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Aug 10 12:48:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62604]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False no_block=False state=None enabled=None force=None user=None scope=None
Aug 10 12:48:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62731]: ansible-systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False no_block=False force=None masked=None user=None scope=None
Aug 10 12:48:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62858]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Aug 10 12:48:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[62981]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
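The firewalld steps above install the package, unmask and enable the service, and then open 8000/tcp and 9000/tcp both permanently and in the running configuration through fedora.linux_system_roles.firewall_lib. Callers normally drive this through the collection's firewall role rather than the library module directly; a sketch under that assumption (the variable layout follows the firewall role's documented interface, the ports are the ones from this log):

- name: Open the demo app ports
  include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 8000/tcp
        state: enabled
        permanent: true
        runtime: true
      - port: 9000/tcp
        state: enabled
        permanent: true
        runtime: true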
Aug 10 12:48:21 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[63495]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:48:21 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[63619]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[63744]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopping quadlet-demo.service...
-- Subject: Unit quadlet-demo.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has begun shutting down.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06.scope has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06.scope: Consumed 39ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06.scope completed and consumed the indicated resources.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-50025198fd7aa43ff73de65d758f07df1bd5297b476b7f0f67cd04a2b89d0d06-userdata-shm.mount has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-bd3869123cee4022d3bcf5f4fb0f1958556f6db6c0213b92327bbbda5ebb2dc6-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-bd3869123cee4022d3bcf5f4fb0f1958556f6db6c0213b92327bbbda5ebb2dc6-merged.mount has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.scope has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.scope: Consumed 36ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.scope completed and consumed the indicated resources.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com conmon[60184]: conmon 98ced3336e84da56ca5e : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice/libpod-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7.scope/memory.events
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915.scope has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915.scope: Consumed 136ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-37c04bc75a0ef90960c803bb327e7379002148b26a11f2def7ec3385c76da915.scope completed and consumed the indicated resources.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f.scope has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: libpod-2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f.scope: Consumed 1.221s CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-2c53e6e470925f22df3a6166ce9b3cfae5d8d074321dfe02d01ddd82e92f3e9f.scope completed and consumed the indicated resources.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com dnsmasq[56510]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 1 addresses
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-0cc8b37dcd4616059e4b057bc3a8ff8cce752fd945cc5160d8c6c68b1bc33bec-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-0cc8b37dcd4616059e4b057bc3a8ff8cce752fd945cc5160d8c6c68b1bc33bec-merged.mount has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-8250af18dbd2ddb576c919a3f1cd1e17a066f13e39e46298046c6f8bee3863a3-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-8250af18dbd2ddb576c919a3f1cd1e17a066f13e39e46298046c6f8bee3863a3-merged.mount has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered disabled state
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: device veth0bf0b8d3 left promiscuous mode
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 2(veth0bf0b8d3) entered disabled state
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: run-netns-netns\x2d7bcc5651\x2d05c2\x2da328\x2d6de9\x2da2feedf9de76.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d7bcc5651\x2d05c2\x2da328\x2d6de9\x2da2feedf9de76.mount has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Removed slice cgroup machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice.
-- Subject: Unit machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice has finished shutting down.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice: Consumed 1.393s CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82.slice completed and consumed the indicated resources.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[63793]: Pods stopped:
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[63793]: 1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[63793]: Pods removed:
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[63793]: 1ed91f1b3155c161f37b4af79292c92680658979b5207da0e9aa5d044d138d82
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[63793]: Secrets removed:
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo[63793]: Volumes removed:
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-demo.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-demo.service has successfully entered the 'dead' state.
Aug 10 12:48:23 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-demo.service.
-- Subject: Unit quadlet-demo.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has finished shutting down.
Aug 10 12:48:24 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[64092]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:24 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-de3885d69fd460bea34a50145b5e67e34f0f51a20f28ddc18eaa2343f1152318-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-de3885d69fd460bea34a50145b5e67e34f0f51a20f28ddc18eaa2343f1152318-merged.mount has successfully entered the 'dead' state.
Aug 10 12:48:24 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-98ced3336e84da56ca5efeb351fdbb256198556daee0a0824f67a9d4b2ae9df7-userdata-shm.mount has successfully entered the 'dead' state.
Aug 10 12:48:25 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[64340]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:48:25 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[64463]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:48:25 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:48:25 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:25 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:26 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[64616]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
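Teardown of the quadlet-managed app, as logged from 12:48:23 onward, mirrors deployment in reverse: stop and disable the generated quadlet-demo.service, delete quadlet-demo.kube from /etc/containers/systemd, reload systemd so the generated unit disappears, then prune images that are no longer referenced. A minimal sketch of that sequence, with arguments matching the logged module parameters:

- name: Stop and disable the generated service
  systemd:
    name: quadlet-demo.service
    scope: system
    state: stopped
    enabled: false
    force: true

- name: Remove the quadlet source file
  file:
    path: /etc/containers/systemd/quadlet-demo.kube
    state: absent

- name: Drop the generated unit
  systemd:
    daemon_reload: true

- name: Prune images that are no longer used
  command: podman image prune --all -f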
Aug 10 12:48:27 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[64747]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:28 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[64877]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:28 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[65007]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:28 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[65137]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:29 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[65447]: ansible-service_facts Invoked
Aug 10 12:48:29 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started /usr/bin/podman healthcheck run 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.
-- Subject: Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:48:30 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service: Main process exited, code=exited, status=125/n/a
Aug 10 12:48:30 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service: Failed with result 'exit-code'.
-- Subject: Unit failed
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.service has entered the 'failed' state with result 'exit-code'.
Aug 10 12:48:32 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[65667]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:48:32 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[65791]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:33 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[65916]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:34 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[66164]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:48:35 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[66287]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:48:35 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:48:35 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:35 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:35 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[66440]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:36 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[66570]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:36 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[66700]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[66830]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:37 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[66960]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:38 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[67271]: ansible-service_facts Invoked
Aug 10 12:48:40 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[67483]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:48:41 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[67607]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:42 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[67732]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:43 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[67980]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:48:43 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[68103]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:48:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:48:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:43 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:44 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[68256]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[68386]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[68516]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:45 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[68646]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:46 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[68776]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:47 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[69086]: ansible-service_facts Invoked
Aug 10 12:48:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[69298]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:48:49 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[69422]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:51 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[69547]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:48:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:48:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:51 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopping quadlet-demo-mysql.service...
-- Subject: Unit quadlet-demo-mysql.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql.service has begun shutting down.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.timer: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.timer has successfully entered the 'dead' state.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped /usr/bin/podman healthcheck run 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.
-- Subject: Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.timer has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6.timer has finished shutting down.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(vethbde53e09) entered disabled state
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: device vethbde53e09 left promiscuous mode
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com kernel: cni-podman2: port 1(vethbde53e09) entered disabled state
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: run-netns-netns\x2da73e7c85\x2deb66\x2dfc82\x2d9d8c\x2d2a7366959021.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2da73e7c85\x2deb66\x2dfc82\x2d9d8c\x2d2a7366959021.mount has successfully entered the 'dead' state.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6-userdata-shm.mount has successfully entered the 'dead' state.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay-a837f56d03104787870ccc6589fb03f7f70b3bfa77614ce5491f92989b9a148f-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-a837f56d03104787870ccc6589fb03f7f70b3bfa77614ce5491f92989b9a148f-merged.mount has successfully entered the 'dead' state.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com quadlet-demo-mysql[69583]: 77ecf6ddf9f0a56506ba9dd67ea2af32e2713127dc8d66f8b9853e38f9021fc6
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-demo-mysql.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-demo-mysql.service has successfully entered the 'dead' state.
Aug 10 12:48:52 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-demo-mysql.service.
-- Subject: Unit quadlet-demo-mysql.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql.service has finished shutting down.
Aug 10 12:48:53 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[69826]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:48:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[70074]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:48:54 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[70197]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:48:54 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:48:54 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:54 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:48:55 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[70480]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:56 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:48:56 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[70611]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:57 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[70741]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:57 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[70871]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:57 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[71001]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:48:58 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:48:58 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[71312]: ansible-service_facts Invoked
Aug 10 12:49:01 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[71523]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:49:01 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[71647]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:49:02 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[71772]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:49:02 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:49:02 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:02 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:02 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-demo-mysql-volume.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-demo-mysql-volume.service has successfully entered the 'dead' state.
Aug 10 12:49:02 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-demo-mysql-volume.service.
-- Subject: Unit quadlet-demo-mysql-volume.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql-volume.service has finished shutting down.
Aug 10 12:49:03 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[71929]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:49:04 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[72177]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:49:04 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[72300]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:49:04 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:49:04 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:04 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:05 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:49:05 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[72583]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[72713]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:06 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[72844]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[72974]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:07 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[73104]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:07 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:49:08 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[73414]: ansible-service_facts Invoked
Aug 10 12:49:10 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[73625]: ansible-getent Invoked with database=group key=0 fail_key=False service=None split=None
Aug 10 12:49:11 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[73749]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:49:12 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[73874]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Aug 10 12:49:12 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:49:12 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:12 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:12 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: quadlet-demo-network.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-demo-network.service has successfully entered the 'dead' state.
Aug 10 12:49:12 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Stopped quadlet-demo-network.service.
-- Subject: Unit quadlet-demo-network.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-network.service has finished shutting down.
Aug 10 12:49:12 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[74031]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Aug 10 12:49:13 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[74279]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Aug 10 12:49:14 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[74402]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Aug 10 12:49:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Reloading.
Aug 10 12:49:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Aug 10 12:49:14 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:49:15 ip-10-31-12-150.us-east-1.aws.redhat.com NetworkManager[668]: [1723308555.0000] device (cni-podman2): state change: activated -> unmanaged (reason 'unmanaged', sys-iface-state: 'removed')
Aug 10 12:49:15 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.3' (uid=0 pid=668 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Aug 10 12:49:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has begun starting up.
Aug 10 12:49:15 ip-10-31-12-150.us-east-1.aws.redhat.com dbus-daemon[580]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Aug 10 12:49:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
Aug 10 12:49:15 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[74745]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:15 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:49:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[74875]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:16 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:49:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[75005]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:16 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[75135]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:17 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[75265]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:17 ip-10-31-12-150.us-east-1.aws.redhat.com systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Aug 10 12:49:18 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[75550]: ansible-service_facts Invoked
Aug 10 12:49:20 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[75760]: ansible-command Invoked with _raw_params=exec 1>&2
set -x
set -o pipefail
systemctl list-units --plain -l --all | grep quadlet || :
systemctl list-unit-files --all | grep quadlet || :
systemctl list-units --plain --failed -l --all | grep quadlet || :
_uses_shell=True warn=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Aug 10 12:49:20 ip-10-31-12-150.us-east-1.aws.redhat.com platform-python[75890]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
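[Editor's note] The journal above records the same teardown pattern once per quadlet unit (quadlet-demo-mysql.container, quadlet-demo-mysql.volume, quadlet-demo.network): stop the generated service, remove the quadlet file from /etc/containers/systemd, reload systemd, then prune images and re-check images/volumes/containers/networks. The following is only a minimal manual sketch of that sequence for the MySQL container unit, using the unit and file names shown in the log; it is not the role's actual task code, and it assumes root (system scope).

    # Sketch: manual equivalent of the cleanup the journal records above.
    systemctl stop quadlet-demo-mysql.service            # logged as "Stopping quadlet-demo-mysql.service..."
                                                          # (the role's ansible-systemd call also passes enabled=False, force=True)
    rm -f /etc/containers/systemd/quadlet-demo-mysql.container
    systemctl daemon-reload                               # regenerates units from the remaining quadlet files
    podman image prune --all -f                           # followed in the log by: podman images -n, podman volume ls -n,
                                                          # podman ps --noheading, podman network ls -n -q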
PLAY RECAP *********************************************************************
managed_node1 : ok=412 changed=42 unreachable=0 failed=1 skipped=368 rescued=1 ignored=0
Saturday 10 August 2024 12:49:21 -0400 (0:00:00.495) 0:02:44.895 *******
===============================================================================
fedora.linux_system_roles.podman : Ensure container images are present -- 17.88s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 8.03s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 4.07s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 3.32s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.firewall : Install firewalld ------------------ 3.29s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.firewall : Install firewalld ------------------ 3.09s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.48s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Start service ------------------------ 1.90s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.89s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.77s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.74s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.74s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.70s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.68s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.67s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:183
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.52s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.29s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.20s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Check web --------------------------------------------------------------- 1.20s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:109
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.15s
/tmp/collections-xHF/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152