ansible-playbook [core 2.17.8]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-WJe
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.9 (main, Feb 4 2025, 00:00:00) [GCC 14.2.1 20250110 (Red Hat 14.2.1-7)] (/usr/bin/python3.12)
  jinja version = 3.1.5
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_pod.yml ************************************************
2 plays in /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:5
Saturday 15 February 2025 11:43:02 -0500 (0:00:00.013) 0:00:00.013 *****
[WARNING]: Found variable using reserved name: q
ok: [managed-node3] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-qfn/tests/vars/vault-variables.yml" ], "changed": false }
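The task above loads the encrypted test secrets with include_vars. A minimal sketch of its likely shape, assuming the conventional form (the log records only the resolved file, /tmp/podman-qfn/tests/vars/vault-variables.yml, not the task body):

    # Sketch only - the actual task body is not shown in this log
    - name: Include vault variables
      include_vars:
        file: vars/vault-variables.yml  # resolved relative to the test playbook dir (assumed)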
PLAY [Ensure that the role can manage quadlet pods] ****************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Saturday 15 February 2025 11:43:02 -0500 (0:00:00.041) 0:00:00.055 *****
[WARNING]: Platform linux on host managed-node3 is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node3]

TASK [Run the role - root] *****************************************************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:34
Saturday 15 February 2025 11:43:03 -0500 (0:00:01.384) 0:00:01.440 *****
included: fedora.linux_system_roles.podman for managed-node3

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Saturday 15 February 2025 11:43:03 -0500 (0:00:00.061) 0:00:01.501 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Saturday 15 February 2025 11:43:03 -0500 (0:00:00.031) 0:00:01.532 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Saturday 15 February 2025 11:43:03 -0500 (0:00:00.049) 0:00:01.582 *****
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Saturday 15 February 2025 11:43:04 -0500 (0:00:00.460) 0:00:02.042 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Saturday 15 February 2025 11:43:04 -0500 (0:00:00.025) 0:00:02.067 *****
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Saturday 15 February 2025 11:43:04 -0500 (0:00:00.367) 0:00:02.435 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Saturday 15 February 2025 11:43:04 -0500 (0:00:00.023) 0:00:02.459 *****
ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" }
skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" }
ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" }

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Saturday 15 February 2025 11:43:04 -0500 (0:00:00.049) 0:00:02.508 *****
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Saturday 15 February 2025 11:43:05 -0500 (0:00:01.057) 0:00:03.566 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Saturday 15 February 2025 11:43:05 -0500 (0:00:00.051) 0:00:03.617 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Saturday 15 February 2025 11:43:05 -0500 (0:00:00.067) 0:00:03.685 *****
skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" }

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Saturday 15 February 2025 11:43:05 -0500 (0:00:00.072) 0:00:03.757 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Saturday 15 February 2025 11:43:05 -0500 (0:00:00.071) 0:00:03.829 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Saturday 15 February 2025 11:43:05 -0500 (0:00:00.072) 0:00:03.901 *****
ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.029534", "end": "2025-02-15 11:43:06.442162", "rc": 0, "start": "2025-02-15 11:43:06.412628" }

STDOUT:

podman version 5.3.1
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.582) 0:00:04.483 *****
ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.057) 0:00:04.541 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.041) 0:00:04.582 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.062) 0:00:04.645 *****
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }
MSG: end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.058) 0:00:04.703 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.050) 0:00:04.753 *****
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }
MSG: end_host conditional evaluated to false, continuing execution for managed-node3
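Each version gate above is a conditional check keyed off the podman_version fact set just before it; the paired tasks at main.yml:73 and main.yml:96 use meta end_host to stop processing the host instead of failing. A minimal sketch of the gating pattern (the condition string is taken verbatim from the "false_condition" fields above; the message text is an assumption):

    # Sketch of one version gate (assumed message text)
    - name: Podman package version must be 5.0 or later for Pod quadlets
      fail:
        msg: podman version {{ podman_version }} is too old - 5.0 or later is required for Pod quadlets
      when: podman_version is version("5.0", "<")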
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.047) 0:00:04.801 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 15 February 2025 11:43:06 -0500 (0:00:00.086) 0:00:04.887 *****
ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 15 February 2025 11:43:07 -0500 (0:00:00.477) 0:00:05.364 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 15 February 2025 11:43:07 -0500 (0:00:00.057) 0:00:05.422 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 15 February 2025 11:43:07 -0500 (0:00:00.061) 0:00:05.484 *****
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 15 February 2025 11:43:07 -0500 (0:00:00.403) 0:00:05.887 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 15 February 2025 11:43:07 -0500 (0:00:00.035) 0:00:05.922 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.038) 0:00:05.961 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
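The getsubids tasks just skipped only run for a rootless podman_user; root needs no subuid/subgid delegation. A sketch of what the check would look like for a non-root user, assuming the conventional command form (getsubids from shadow-utils prints ranges like "0: user1 100000 65536"; the -g flag does the same for subgids):

    # Sketch of the rootless subuid check (assumed form)
    - name: Check with getsubids for user subuids
      command: getsubids {{ __podman_user | quote }}
      register: __podman_register_subuids
      when: __podman_user not in ["root", "0"]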
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.033) 0:00:05.995 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.030) 0:00:06.026 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.030) 0:00:06.056 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.031) 0:00:06.087 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.030) 0:00:06.117 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.031) 0:00:06.148 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.058) 0:00:06.206 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.089) 0:00:06.296 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" }
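This and the following config-handling tasks are skipped only because the corresponding role variables are empty in this test. A hedged example of input that would exercise the containers.conf.d path, rendering the drop-in named in __podman_container_conf_file above (the exact key layout the role expects is an assumption; the sections mirror containers.conf TOML):

    # Hypothetical value - not part of this test run
    podman_containers_conf:
      containers:
        log_size_max: 10485760  # example setting, written to 50-systemroles.conf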
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.050) 0:00:06.347 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.049) 0:00:06.396 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.096) 0:00:06.492 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.091) 0:00:06.584 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.050) 0:00:06.634 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.090) 0:00:06.724 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.048) 0:00:06.772 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.047) 0:00:06.820 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Saturday 15 February 2025 11:43:08 -0500 (0:00:00.097) 0:00:06.918 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.049) 0:00:06.967 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.047) 0:00:07.014 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.048) 0:00:07.063 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }
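Likewise, the policy.json tasks above would only run with a non-empty podman_policy_json. A minimal hedged example, mirroring the containers-policy.json(5) structure (treating the variable as a dict the role serializes to /etc/containers/policy.json is an assumption):

    # Hypothetical value - not part of this test run
    podman_policy_json:
      default:
        - type: insecureAcceptAnything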
"the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:43:09 -0500 (0:00:00.026) 0:00:07.269 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:43:09 -0500 (0:00:00.027) 0:00:07.297 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:43:09 -0500 (0:00:00.031) 0:00:07.328 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:09 -0500 (0:00:00.083) 0:00:07.412 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:43:09 -0500 (0:00:00.095) 0:00:07.507 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:09 -0500 (0:00:00.063) 0:00:07.571 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:09 -0500 (0:00:00.049) 0:00:07.621 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.072) 0:00:07.693 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.071) 0:00:07.765 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.040) 0:00:07.805 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.038) 0:00:07.844 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 15 February 2025 11:43:09 -0500 (0:00:00.050) 0:00:07.894 *****
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.403) 0:00:08.298 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.057) 0:00:08.356 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.047) 0:00:08.403 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.049) 0:00:08.453 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.048) 0:00:08.502 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.051) 0:00:08.553 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.049) 0:00:08.603 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.048) 0:00:08.651 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.046) 0:00:08.698 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.082) 0:00:08.780 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.050) 0:00:08.830 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 15 February 2025 11:43:10 -0500 (0:00:00.047) 0:00:08.878 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.121) 0:00:09.000 *****
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.109) 0:00:09.109 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.045) 0:00:09.154 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.108) 0:00:09.263 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.087) 0:00:09.350 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.052) 0:00:09.403 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.047) 0:00:09.450 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.049) 0:00:09.499 *****
skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.042) 0:00:09.542 *****
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Saturday 15 February 2025 11:43:11 -0500 (0:00:00.047) 0:00:09.589 *****
ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Saturday 15 February 2025 11:43:12 -0500 (0:00:00.591) 0:00:10.180 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Saturday 15 February 2025 11:43:12 -0500 (0:00:00.043) 0:00:10.224 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Saturday 15 February 2025 11:43:12 -0500 (0:00:00.055) 0:00:10.279 *****
changed: [managed-node3] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/etc/containers/systemd/quadlet-pod-pod.pod", "gid": 0, "group": "root", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1739637792.3790746-22752-149265129293098/.source.pod", "state": "file", "uid": 0 }
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Saturday 15 February 2025 11:43:13 -0500 (0:00:00.922) 0:00:11.202 *****
ok: [managed-node3] => { "changed": false, "name": null, "status": {} }

TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Saturday 15 February 2025 11:43:14 -0500 (0:00:00.999) 0:00:12.202 *****
changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket system.slice -.mount network-online.target basic.target sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod
create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": 
"13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3244728320", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": 
"[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:43:15 -0500 (0:00:01.203) 0:00:13.406 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.037) 0:00:13.443 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.040) 0:00:13.484 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.039) 0:00:13.523 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:15 -0500 
(0:00:00.029) 0:00:13.553 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.104) 0:00:13.658 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.069) 0:00:13.727 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.040) 0:00:13.767 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.040) 0:00:13.808 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:15 -0500 (0:00:00.044) 0:00:13.852 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.404) 0:00:14.257 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman 
: Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.050) 0:00:14.308 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.053) 0:00:14.361 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.051) 0:00:14.412 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.070) 0:00:14.482 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.050) 0:00:14.533 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.053) 0:00:14.586 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.050) 0:00:14.637 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.054) 0:00:14.691 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": 
"system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.094) 0:00:14.785 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.059) 0:00:14.845 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:16 -0500 (0:00:00.070) 0:00:14.915 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.140) 0:00:15.056 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.061) 0:00:15.117 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.053) 0:00:15.170 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.129) 0:00:15.300 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.058) 0:00:15.359 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.031) 0:00:15.391 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.030) 0:00:15.421 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.029) 0:00:15.450 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 15 February 2025 11:43:17 -0500 (0:00:00.027) 0:00:15.478 ***** changed: [managed-node3] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 15 February 2025 11:43:19 -0500 (0:00:01.780) 0:00:17.258 ***** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 33, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:19 -0500 (0:00:00.440) 0:00:17.699 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 15 February 2025 11:43:19 -0500 (0:00:00.056) 0:00:17.756 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 15 February 2025 11:43:19 -0500 (0:00:00.051) 0:00:17.807 ***** changed: [managed-node3] 
=> { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/etc/containers/systemd/quadlet-pod-container.container", "gid": 0, "group": "root", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1739637799.9090035-22970-70971864428544/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 15 February 2025 11:43:20 -0500 (0:00:00.751) 0:00:18.558 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 15 February 2025 11:43:21 -0500 (0:00:00.789) 0:00:19.348 ***** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "quadlet-pod-pod-pod.service -.mount systemd-journald.socket system.slice basic.target network-online.target sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", 
"ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": 
"524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3224195072", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", 
"StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target quadlet-pod-pod-pod.service", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.782) 0:00:20.131 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.049) 0:00:20.181 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.033) 0:00:20.214 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.042) 0:00:20.257 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40 Saturday 15 February 2025 11:43:22 -0500 (0:00:00.053) 0:00:20.311 ***** ok: [managed-node3] => (item=quadlet-pod-container.container) => { "ansible_loop_var": 
"item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:01.004303", "end": "2025-02-15 11:43:23.671767", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-02-15 11:43:22.667464" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node3] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003167", "end": "2025-02-15 11:43:24.080877", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-02-15 11:43:24.077710" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:49 Saturday 15 February 2025 11:43:24 -0500 (0:00:01.814) 0:00:22.125 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.042374", "end": "2025-02-15 11:43:24.546371", "failed_when_result": false, "rc": 0, "start": "2025-02-15 11:43:24.503997" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Create user for testing] ************************************************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:57 Saturday 15 February 2025 11:43:24 -0500 (0:00:00.467) 0:00:22.592 ***** changed: [managed-node3] => { "changed": true, "comment": "", "create_home": true, "group": 2223, "home": "/home/user_quadlet_pod", "name": "user_quadlet_pod", "shell": "/bin/bash", "state": "present", "system": false, "uid": 2223 } TASK [Run the role - user] ***************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:62 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.781) 0:00:23.374 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.164) 0:00:23.539 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.081) 0:00:23.620 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.078) 0:00:23.699 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is 
defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.085) 0:00:23.785 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.072) 0:00:23.857 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:43:25 -0500 (0:00:00.058) 0:00:23.915 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.061) 0:00:23.976 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:43:26 -0500 (0:00:00.126) 0:00:24.103 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.898) 0:00:25.002 ***** skipping: 
[managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.036) 0:00:25.038 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.033) 0:00:25.072 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.031) 0:00:25.104 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.030) 0:00:25.134 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.032) 0:00:25.166 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.027410", "end": "2025-02-15 11:43:27.543452", "rc": 0, "start": "2025-02-15 11:43:27.516042" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.400) 0:00:25.567 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.074) 0:00:25.641 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.029) 0:00:25.671 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.052) 0:00:25.723 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.046) 0:00:25.770 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:43:27 -0500 (0:00:00.092) 0:00:25.862 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.111) 0:00:25.973 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.167) 0:00:26.141 ***** ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "user_quadlet_pod": [ "x", "2223", "2223", "", "/home/user_quadlet_pod", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.429) 0:00:26.570 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.058) 0:00:26.629 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:28 -0500 (0:00:00.066) 0:00:26.695 ***** ok: [managed-node3] => 
{ "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.449) 0:00:27.145 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004061", "end": "2025-02-15 11:43:29.565958", "rc": 0, "start": "2025-02-15 11:43:29.561897" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:29 -0500 (0:00:00.446) 0:00:27.592 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005871", "end": "2025-02-15 11:43:30.022307", "rc": 0, "start": "2025-02-15 11:43:30.016436" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.453) 0:00:28.046 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.047) 0:00:28.093 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.045) 0:00:28.138 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.052) 0:00:28.191 ***** 
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.078) 0:00:28.270 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.061) 0:00:28.332 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.073) 0:00:28.405 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/root/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/root/.config/containers/policy.json", "__podman_registries_conf_file": "/root/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/root/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.123) 0:00:28.528 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.094) 0:00:28.623 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.054) 0:00:28.678 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.052) 0:00:28.731 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.119) 0:00:28.850 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:43:30 -0500 (0:00:00.052) 0:00:28.903 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.053) 0:00:28.957 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.103) 0:00:29.060 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.049) 0:00:29.109 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.052) 0:00:29.162 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.126) 0:00:29.288 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.050) 0:00:29.339 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:43:31 -0500 
(0:00:00.041) 0:00:29.380 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.046) 0:00:29.427 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.042) 0:00:29.469 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.035) 0:00:29.505 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.029) 0:00:29.535 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.071) 0:00:29.606 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.027) 0:00:29.634 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.031) 0:00:29.665 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.049) 0:00:29.715 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle 
Quadlet specifications] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:43:31 -0500 (0:00:00.061) 0:00:29.776 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.182) 0:00:29.959 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.072) 0:00:30.032 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.065) 0:00:30.097 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.054) 0:00:30.151 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.123) 0:00:30.275 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.160) 0:00:30.436 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.112) 0:00:30.548 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.067) 0:00:30.615 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:32 -0500 (0:00:00.084) 0:00:30.700 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.418) 0:00:31.118 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004213", "end": "2025-02-15 11:43:33.477197", "rc": 0, "start": "2025-02-15 11:43:33.472984" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.397) 0:00:31.515 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005313", "end": "2025-02-15 11:43:33.902578", "rc": 0, "start": "2025-02-15 11:43:33.897265" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:33 -0500 (0:00:00.407) 0:00:31.923 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.049) 0:00:31.972 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.031) 0:00:32.004 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.080) 0:00:32.085 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.061) 0:00:32.147 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.060) 0:00:32.208 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.067) 0:00:32.275 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.093) 0:00:32.369 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.054) 0:00:32.423 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.055) 0:00:32.479 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.120) 0:00:32.599 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.062) 0:00:32.662 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.046) 0:00:32.709 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.112) 0:00:32.821 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:43:34 -0500 (0:00:00.101) 0:00:32.923 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": "0:00:00.016729", "end": "2025-02-15 11:43:35.307560", "rc": 0, "start": "2025-02-15 11:43:35.290831" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.452) 0:00:33.375 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.060) 0:00:33.435 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" }
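
Note: the loginctl enable-linger call just above is the step that lets user_quadlet_pod's systemd user units keep running without an active login session; on the role's second pass later in this run the same task is skipped because /var/lib/systemd/linger/user_quadlet_pod already exists. Expressed as a standalone task, the step amounts to roughly the following sketch (the role's real task carries additional conditions; the creates: guard mirrors the "Did not run command since ... exists" message seen later in this log):

    # Sketch of the role's "Enable linger if needed" step, not its literal task.
    - name: Enable linger so rootless user units survive logout
      become: true
      ansible.builtin.command:
        cmd: loginctl enable-linger user_quadlet_pod
        creates: /var/lib/systemd/linger/user_quadlet_pod
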
TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.052) 0:00:33.488 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.045) 0:00:33.533 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 15 February 2025 11:43:35 -0500 (0:00:00.050) 0:00:33.583 ***** changed: [managed-node3] => { "changed": true, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 6, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:36 -0500 (0:00:00.433) 0:00:34.017 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 15 February 2025 11:43:36 -0500 (0:00:00.053) 0:00:34.070 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 15 February 2025 11:43:36 -0500 (0:00:00.053) 0:00:34.123 ***** changed: [managed-node3] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1739637816.2245555-23775-168630216824945/.source.pod", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 15 February 2025 11:43:37 -0500 (0:00:00.859) 0:00:34.983 ***** [WARNING]: Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user.
To avoid this, create the remote_tmp dir with the correct permissions manually ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 15 February 2025 11:43:37 -0500 (0:00:00.662) 0:00:35.646 ***** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target app.slice podman-user-wait-network-online.service run-user-2223.mount -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid 
--pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", 
"LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3692822528", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not 
set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:43:39 -0500 (0:00:01.461) 0:00:37.108 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.052) 0:00:37.160 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.068) 0:00:37.229 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.059) 0:00:37.288 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables 
part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.036) 0:00:37.324 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.053) 0:00:37.377 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.063) 0:00:37.441 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.034) 0:00:37.476 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.032) 0:00:37.509 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.043) 0:00:37.553 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:39 -0500 (0:00:00.381) 0:00:37.934 ***** ok: 
[managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004604", "end": "2025-02-15 11:43:40.293772", "rc": 0, "start": "2025-02-15 11:43:40.289168" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.405) 0:00:38.340 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005451", "end": "2025-02-15 11:43:40.728709", "rc": 0, "start": "2025-02-15 11:43:40.723258" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.428) 0:00:38.768 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.076) 0:00:38.845 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.050) 0:00:38.896 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:40 -0500 (0:00:00.051) 0:00:38.948 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.068) 0:00:39.017 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.053) 0:00:39.070 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.054) 0:00:39.125 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.219) 0:00:39.345 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.053) 0:00:39.398 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.051) 0:00:39.450 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.118) 0:00:39.569 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.060) 0:00:39.630 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.044) 0:00:39.674 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.115) 0:00:39.789 ***** included: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:43:41 -0500 (0:00:00.084) 0:00:39.874 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_pod exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_pod' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.410) 0:00:40.284 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.037) 0:00:40.321 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.032) 0:00:40.353 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 15 February 2025 11:43:42 -0500 (0:00:00.027) 0:00:40.381 ***** changed: [managed-node3] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 15 February 2025 11:43:44 -0500 (0:00:01.587) 0:00:41.968 ***** ok: [managed-node3] => { "changed": false, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 33, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.433) 0:00:42.402 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.050) 0:00:42.453 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 15 February 2025 11:43:44 -0500 (0:00:00.051) 0:00:42.505 ***** changed: [managed-node3] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1739637824.605515-24058-264025358587335/.source.container", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 15 February 2025 11:43:45 -0500 (0:00:00.778) 0:00:43.284 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.693) 0:00:43.978 ***** changed: [managed-node3] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "quadlet-pod-pod-pod.service podman-user-wait-network-online.service basic.target -.mount run-user-2223.mount app.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target default.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend 
cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": 
"[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3660886016", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", 
"RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target quadlet-pod-pod-pod.service", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.780) 0:00:44.758 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.115) 0:00:44.873 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:43:46 -0500 (0:00:00.043) 0:00:44.916 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.046) 0:00:44.963 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [Check files] ************************************************************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:70 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.057) 0:00:45.021 ***** ok: [managed-node3] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:00.003080", "end": "2025-02-15 11:43:47.380323", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-02-15 11:43:47.377243" }

STDOUT:

#
# Ansible managed
#
# system_role:podman

[Install]
WantedBy=default.target

[Container]
Image=quay.io/libpod/testimage:20210610
ContainerName=quadlet-pod-container
Pod=quadlet-pod-pod.pod
Exec=/bin/busybox-extras httpd -f -p 80

ok: [managed-node3] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003239", "end": "2025-02-15 11:43:47.723409", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-02-15 11:43:47.720170" }

STDOUT:

#
# Ansible managed
#
# system_role:podman

[Pod]
PodName=quadlet-pod

TASK [Check pod] *************************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:79 Saturday 15 February 2025 11:43:47 -0500 (0:00:00.723) 0:00:45.744 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.055012", "end": "2025-02-15 11:43:48.220519", "failed_when_result": false, "rc": 0, "start": "2025-02-15 11:43:48.165507" }

STDOUT:

quadlet-pod-infra
quadlet-pod-container
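
The two files printed above close the loop on the no_log-censored "Handle Quadlet specifications" items: each spec became one quadlet unit file under ~/.config/containers/systemd, and podman pod inspect confirms the pod is running its infra container plus the test container. A playbook input that would render exactly these two files might look like the sketch below, assuming the role's podman_quadlet_specs / podman_run_as_user interface; the test's real variables are hidden by no_log, so this is reconstructed from the __podman_quadlet_spec facts logged earlier, not copied from the source:

    # Hypothetical reconstruction -- not the test's literal (no_log-censored) vars.
    - name: Run the role - create a rootless quadlet pod with one container
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.podman
      vars:
        podman_run_as_user: user_quadlet_pod
        podman_quadlet_specs:
          # rendered as quadlet-pod-pod.pod; started as quadlet-pod-pod-pod.service
          - name: quadlet-pod-pod
            type: pod
            Pod:
              PodName: quadlet-pod
          # rendered as quadlet-pod-container.container; bound to the pod unit
          - name: quadlet-pod-container
            type: container
            Container:
              ContainerName: quadlet-pod-container
              Image: quay.io/libpod/testimage:20210610
              Pod: quadlet-pod-pod.pod
              Exec: /bin/busybox-extras httpd -f -p 80
            Install:
              WantedBy: default.target
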
1, "path": "/var/lib/systemd/linger/user_quadlet_pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "2942919167", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:99 Saturday 15 February 2025 11:43:48 -0500 (0:00:00.401) 0:00:46.648 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:43:48 -0500 (0:00:00.116) 0:00:46.764 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:43:48 -0500 (0:00:00.082) 0:00:46.847 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:43:48 -0500 (0:00:00.058) 0:00:46.905 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:43:48 -0500 (0:00:00.038) 0:00:46.944 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.035) 0:00:46.979 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.035) 0:00:47.015 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.037) 0:00:47.053 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ 
"podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:43:49 -0500 (0:00:00.078) 0:00:47.132 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.950) 0:00:48.083 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.031) 0:00:48.114 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.040) 0:00:48.155 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.031) 0:00:48.186 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.030) 0:00:48.217 ***** skipping: [managed-node3] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.032) 0:00:48.249 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.027140", "end": "2025-02-15 11:43:50.628145", "rc": 0, "start": "2025-02-15 11:43:50.601005" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.419) 0:00:48.668 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.054) 0:00:48.722 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.033) 0:00:48.756 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.044) 0:00:48.800 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.047) 0:00:48.848 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:43:50 -0500 (0:00:00.056) 0:00:48.905 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:43:51 -0500 (0:00:00.053) 0:00:48.958 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:51 -0500 (0:00:00.059) 0:00:49.018 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:51 -0500 (0:00:00.035) 0:00:49.053 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:51 -0500 (0:00:00.033) 0:00:49.086 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:51 -0500 (0:00:00.041) 0:00:49.128 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:51 -0500 (0:00:00.388) 0:00:49.516 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003903", "end": "2025-02-15 11:43:51.892387", "rc": 0, "start": "2025-02-15 11:43:51.888484" } STDOUT: 0: user_quadlet_pod 655360 65536
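The subuid probe above, `getsubids user_quadlet_pod`, returns `0: user_quadlet_pod 655360 65536`, i.e. one range of 65536 IDs starting at 655360; the matching subgid probe with `-g` follows. A hedged standalone equivalent of this check (the register name is made up for the sketch):

    - name: Verify subuid allocation for user_quadlet_pod (sketch)
      ansible.builtin.command: getsubids user_quadlet_pod  # add -g for subgids
      register: subuid_check  # hypothetical variable name
      changed_when: false
      failed_when: subuid_check.stdout | trim | length == 0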
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.483) 0:00:49.999 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005261", "end": "2025-02-15 11:43:52.387834", "rc": 0, "start": "2025-02-15 11:43:52.382573" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.410) 0:00:50.410 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.049) 0:00:50.459 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.032) 0:00:50.491 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.040) 0:00:50.532 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.052) 0:00:50.584 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.055) 0:00:50.640 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.047) 0:00:50.687 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": 
"/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.044) 0:00:50.732 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.066) 0:00:50.799 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.035) 0:00:50.834 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.031) 0:00:50.866 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.055) 0:00:50.921 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:43:52 -0500 (0:00:00.029) 0:00:50.951 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.030) 0:00:50.981 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.061) 0:00:51.043 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.041) 0:00:51.085 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.050) 0:00:51.135 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.122) 0:00:51.257 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.035) 0:00:51.293 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.031) 0:00:51.325 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.029) 0:00:51.355 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.032) 0:00:51.388 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.031) 0:00:51.419 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.030) 0:00:51.450 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.030) 0:00:51.480 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.031) 0:00:51.511 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.039) 0:00:51.551 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.045) 0:00:51.596 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.029) 0:00:51.626 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.103) 0:00:51.729 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false }
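The "part 0" facts above reveal the quadlet spec being processed: a Container unit joined to quadlet-pod-pod.pod, and "part 1" next shows it is handled with state absent for user_quadlet_pod. A hedged reconstruction of role input that would produce these facts; `podman_quadlet_specs` and `podman_run_as_user` are the role's documented variables, the section values are copied from this log, and the second censored item would be the matching quadlet-pod-pod.pod spec shown under "Check files" earlier:

    vars:
      podman_run_as_user: user_quadlet_pod
      podman_quadlet_specs:
        - name: quadlet-pod-container
          type: container
          state: absent               # this pass is cleaning the unit up
          Install:
            WantedBy: default.target
          Container:
            Image: quay.io/libpod/testimage:20210610
            ContainerName: quadlet-pod-container
            Pod: quadlet-pod-pod.pod
            Exec: /bin/busybox-extras httpd -f -p 80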
[managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.039) 0:00:51.811 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.030) 0:00:51.841 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.047) 0:00:51.889 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:43:53 -0500 (0:00:00.055) 0:00:51.945 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.036) 0:00:51.982 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.112) 0:00:52.094 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.048) 0:00:52.143 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, 
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.399) 0:00:52.542 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004606", "end": "2025-02-15 11:43:54.909684", "rc": 0, "start": "2025-02-15 11:43:54.905078" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:43:54 -0500 (0:00:00.398) 0:00:52.940 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006015", "end": "2025-02-15 11:43:55.317391", "rc": 0, "start": "2025-02-15 11:43:55.311376" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.399) 0:00:53.340 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.047) 0:00:53.387 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.034) 0:00:53.422 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.032) 0:00:53.454 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:43:55 -0500 
(0:00:00.034) 0:00:53.488 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.032) 0:00:53.521 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.033) 0:00:53.554 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.051) 0:00:53.605 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.034) 0:00:53.640 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.028) 0:00:53.668 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.074) 0:00:53.743 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.037) 0:00:53.781 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK 
[fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:43:55 -0500 (0:00:00.075) 0:00:53.856 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637815.3375952, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1739637826.631666, "dev": 72, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1739637826.631666, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 220, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:43:56 -0500 (0:00:00.389) 0:00:54.246 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Sat 2025-02-15 11:43:46 EST", "ActiveEnterTimestampMonotonic": "946920289", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "quadlet-pod-pod-pod.service podman-user-wait-network-online.service basic.target -.mount run-user-2223.mount app.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-02-15 11:43:46 EST", "AssertTimestampMonotonic": "946788394", "Before": "shutdown.target default.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "98382000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-02-15 11:43:46 EST", "ConditionTimestampMonotonic": "946788388", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": 
"/user.slice/user-2223.slice/user@2223.service/app.slice/quadlet-pod-container.service", "ControlGroupId": "12416", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "64427", "ExecMainStartTimestamp": "Sat 2025-02-15 11:43:46 EST", "ExecMainStartTimestampMonotonic": "946870001", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Sat 2025-02-15 11:43:46 EST] ; stop_time=[n/a] ; pid=64417 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Sat 2025-02-15 11:43:46 EST] ; stop_time=[n/a] ; pid=64417 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", 
"IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-02-15 11:43:46 EST", "InactiveExitTimestampMonotonic": "946795413", "InvocationID": "ff9779880f8d48468661a88781985414", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "64427", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3657904128", "MemoryCurrent": "884736", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "20865024", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", 
"RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-02-15 11:43:46 EST", "StateChangeTimestampMonotonic": "946920289", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target quadlet-pod-pod-pod.service", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:44:07 -0500 (0:00:10.995) 0:01:05.241 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637825.9046617, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1739637825.2446578, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 541065413, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": 
"0644", "mtime": 1739637824.9746563, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 2223, "version": "3483678146", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.417) 0:01:05.659 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:44:07 -0500 (0:00:00.155) 0:01:05.814 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:44:08 -0500 (0:00:00.530) 0:01:06.345 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:44:08 -0500 (0:00:00.056) 0:01:06.401 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:44:08 -0500 (0:00:00.032) 0:01:06.434 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:44:08 -0500 (0:00:00.032) 0:01:06.466 ***** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:08 -0500 (0:00:00.413) 0:01:06.879 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:44:09 -0500 (0:00:00.692) 0:01:07.572 ***** ok: 
[managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.504) 0:01:08.076 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.044) 0:01:08.120 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.032) 0:01:08.153 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.066384", "end": "2025-02-15 11:44:10.667654", "rc": 0, "start": "2025-02-15 11:44:10.601270" } STDOUT: 7aacd241b66e9a50cbaff9a5b70ac2541960a2bced24073b2d466f20bbd08382 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.537) 0:01:08.691 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.060) 0:01:08.751 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.049) 0:01:08.801 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.050) 0:01:08.851 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:44:10 -0500 (0:00:00.043) 
0:01:08.895 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.041586", "end": "2025-02-15 11:44:11.369452", "rc": 0, "start": "2025-02-15 11:44:11.327866" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.498) 0:01:09.394 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.047278", "end": "2025-02-15 11:44:11.869271", "rc": 0, "start": "2025-02-15 11:44:11.821993" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:44:11 -0500 (0:00:00.504) 0:01:09.898 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.042305", "end": "2025-02-15 11:44:12.368453", "rc": 0, "start": "2025-02-15 11:44:12.326148" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:44:12 -0500 (0:00:00.510) 0:01:10.408 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.043649", "end": "2025-02-15 11:44:12.905437", "rc": 0, "start": "2025-02-15 11:44:12.861788" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:44:12 -0500 (0:00:00.524) 0:01:10.933 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:44:13 -0500 (0:00:00.489) 0:01:11.422 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:44:14 -0500 (0:00:00.546) 0:01:11.969 ***** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": 
"nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": 
"rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { 
"name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { 
"name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": 
"systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:16 -0500 (0:00:02.210) 0:01:14.179 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.033) 0:01:14.212 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", 
"__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.044) 0:01:14.257 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.043) 0:01:14.301 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.034) 0:01:14.335 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.051) 0:01:14.387 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.063) 0:01:14.450 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.039) 0:01:14.490 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.044) 0:01:14.535 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:16 -0500 (0:00:00.050) 0:01:14.585 ***** ok: [managed-node3] 
=> { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:17 -0500 (0:00:00.398) 0:01:14.983 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004071", "end": "2025-02-15 11:44:17.370146", "rc": 0, "start": "2025-02-15 11:44:17.366075" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:17 -0500 (0:00:00.433) 0:01:15.416 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005288", "end": "2025-02-15 11:44:17.811748", "rc": 0, "start": "2025-02-15 11:44:17.806460" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:17 -0500 (0:00:00.425) 0:01:15.842 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:17 -0500 (0:00:00.056) 0:01:15.899 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:17 -0500 (0:00:00.039) 0:01:15.938 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.087) 0:01:16.025 
***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.034) 0:01:16.060 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.036) 0:01:16.097 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.038) 0:01:16.135 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.087) 0:01:16.223 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.058) 0:01:16.281 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.051) 0:01:16.332 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.124) 0:01:16.456 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.065) 0:01:16.521 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:44:18 -0500 (0:00:00.145) 0:01:16.667 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637815.3375952, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1739637847.401787, "dev": 72, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1739637847.401787, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:44:19 -0500 (0:00:00.441) 0:01:17.108 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount basic.target run-user-2223.mount podman-user-wait-network-online.service app.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not 
set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3690270720", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": 
"1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:44:19 -0500 (0:00:00.707) 0:01:17.815 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637817.5826097, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 1739637816.869605, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 327155915, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 
TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:44:20 -0500 (0:00:00.394) 0:01:18.210 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:44:20 -0500 (0:00:00.060) 0:01:18.270 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:44:20 -0500 (0:00:00.369) 0:01:18.639 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:44:20 -0500 (0:00:00.051) 0:01:18.691 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:44:20 -0500 (0:00:00.088) 0:01:18.779 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:44:20 -0500 (0:00:00.034) 0:01:18.814 ***** changed: [managed-node3] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:21 -0500 (0:00:00.386) 0:01:19.200 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:44:21 -0500 (0:00:00.680) 0:01:19.881 ***** ok: [managed-node3] => { "censored": "the output has
been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:44:22 -0500 (0:00:00.549) 0:01:20.431 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:22 -0500 (0:00:00.082) 0:01:20.513 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:44:22 -0500 (0:00:00.044) 0:01:20.557 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.055560", "end": "2025-02-15 11:44:23.042589", "rc": 0, "start": "2025-02-15 11:44:22.987029" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:44:23 -0500 (0:00:00.520) 0:01:21.078 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:44:23 -0500 (0:00:00.061) 0:01:21.140 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:44:23 -0500 (0:00:00.036) 0:01:21.176 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:44:23 -0500 (0:00:00.037) 0:01:21.214 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:44:23 -0500 (0:00:00.040) 0:01:21.254 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.041700", "end": "2025-02-15 11:44:23.717450", "rc": 0, "start": "2025-02-15 
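11:44:23.675750" }

The prune above ("podman image prune --all -f") removes every image left unused once the pod is gone; the tasks that follow are read-only listings the role emits purely for test debugging. The same checks can be run by hand with the flags copied from the commands in this log; after a clean teardown, only the built-in "podman" network should print:

  # Post-cleanup listings; empty output (except the default "podman"
  # network) means no leftover resources.
  podman images -n          # images, no header
  podman volume ls -n       # volumes
  podman ps --noheading     # running containers
  podman network ls -n -q   # network names only
  podman secret ls -n -q    # secrets
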
11:44:23.675750" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:44:23 -0500 (0:00:00.495) 0:01:21.750 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.042634", "end": "2025-02-15 11:44:24.218358", "rc": 0, "start": "2025-02-15 11:44:24.175724" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:44:24 -0500 (0:00:00.499) 0:01:22.250 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.046841", "end": "2025-02-15 11:44:24.724566", "rc": 0, "start": "2025-02-15 11:44:24.677725" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:44:24 -0500 (0:00:00.502) 0:01:22.752 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.043676", "end": "2025-02-15 11:44:25.250721", "rc": 0, "start": "2025-02-15 11:44:25.207045" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:44:25 -0500 (0:00:00.527) 0:01:23.279 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:44:25 -0500 (0:00:00.508) 0:01:23.788 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:44:26 -0500 (0:00:00.588) 0:01:24.377 ***** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": 
"systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": 
"systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": 
"systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:28 -0500 (0:00:02.000) 0:01:26.378 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:44:28 -0500 (0:00:00.033) 0:01:26.412 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_pod) TASK [fedora.linux_system_roles.podman : Get user information] 
***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Saturday 15 February 2025 11:44:28 -0500 (0:00:00.085) 0:01:26.498 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Saturday 15 February 2025 11:44:28 -0500 (0:00:00.039) 0:01:26.537 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 15 February 2025 11:44:28 -0500 (0:00:00.043) 0:01:26.580 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637815.3375952, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1739637847.401787, "dev": 72, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1739637847.401787, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 15 February 2025 11:44:29 -0500 (0:00:00.381) 0:01:26.962 ***** ok: [managed-node3] => { "changed": false, "containers": [] } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 15 February 2025 11:44:29 -0500 (0:00:00.575) 0:01:27.538 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-q" ], "delta": "0:00:00.049485", "end": "2025-02-15 11:44:30.015544", "rc": 0, "start": "2025-02-15 11:44:29.966059" } STDOUT: podman TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 15 February 2025 11:44:30 -0500 (0:00:00.524) 0:01:28.062 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "secret", "ls", "-n", "-q" ], "delta": "0:00:00.048200", "end": "2025-02-15 11:44:30.566716", "rc": 0, "start": "2025-02-15 11:44:30.518516" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 15 February 2025 11:44:30 -0500 (0:00:00.536) 0:01:28.599 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "loginctl", 
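"disable-linger", "user_quadlet_pod" ], "delta": "0:00:00.007513", "end": "2025-02-15 11:44:31.011178", "rc": 0, "start": "2025-02-15 11:44:31.003665" }

With no containers, networks, or secrets left for the user, the role cancels lingering so the user@2223.service manager can shut down. A sketch of the same sequence run by hand (the user name is copied from this log; the polling loop is an assumption about how to wait, not the role's exact retry logic):

  # Cancel lingering for the rootless test user, then wait for the
  # session to leave the "closing" state. Once the user is fully gone,
  # loginctl exits non-zero ("not logged in or lingering"), which also
  # ends the wait.
  loginctl disable-linger user_quadlet_pod
  while [ "$(loginctl show-user --value -p State user_quadlet_pod 2>/dev/null)" = "closing" ]; do
    sleep 1
  done
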
"disable-linger", "user_quadlet_pod" ], "delta": "0:00:00.007513", "end": "2025-02-15 11:44:31.011178", "rc": 0, "start": "2025-02-15 11:44:31.003665" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 15 February 2025 11:44:31 -0500 (0:00:00.494) 0:01:29.093 ***** fatal: [managed-node3]: FAILED! => { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.006275", "end": "2025-02-15 11:44:31.478538", "rc": 1, "start": "2025-02-15 11:44:31.472263" } STDERR: Failed to get user: User ID 2223 is not logged in or lingering MSG: non-zero return code ...ignoring TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 15 February 2025 11:44:31 -0500 (0:00:00.432) 0:01:29.525 ***** changed: [managed-node3] => { "changed": true, "name": "systemd-logind", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-02-15 11:42:49 EST", "ActiveEnterTimestampMonotonic": "889413658", "ActiveExitTimestamp": "Sat 2025-02-15 11:42:48 EST", "ActiveExitTimestampMonotonic": "888925975", "ActiveState": "active", "After": "nss-user-lookup.target dbus.socket modprobe@drm.service sysinit.target system.slice systemd-journald.socket basic.target tmp.mount systemd-tmpfiles-setup.service -.mount user.slice systemd-remount-fs.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-02-15 11:42:49 EST", "AssertTimestampMonotonic": "889363971", "Before": "session-8.scope multi-user.target user-runtime-dir@0.service user@0.service shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.freedesktop.login1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "168285000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanClean": "runtime state fdstore", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_linux_immutable cap_sys_admin cap_sys_tty_config cap_audit_control cap_mac_admin", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-02-15 11:42:49 EST", "ConditionTimestampMonotonic": "889363968", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/systemd-logind.service", "ControlGroupId": "9144", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "User Login Management", "DeviceAllow": "block-* r", "DevicePolicy": "auto", "Documentation": "\"man:sd-login(3)\" \"man:systemd-logind.service(8)\" \"man:logind.conf(5)\" \"man:org.freedesktop.login1(5)\"", "DropInPaths": 
"/usr/lib/systemd/system/systemd-logind.service.d/10-grub2-logind-service.conf", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22347", "Environment": "SYSTEMD_REBOOT_TO_BOOT_LOADER_MENU=true", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-02-15 11:42:49 EST", "ExecMainHandoffTimestampMonotonic": "889400101", "ExecMainPID": "56322", "ExecMainStartTimestamp": "Sat 2025-02-15 11:42:49 EST", "ExecMainStartTimestampMonotonic": "889367024", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "768", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/systemd-logind.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPAddressDeny": "::/0 0.0.0.0/0", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-logind.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-02-15 11:42:48 EST", "InactiveEnterTimestampMonotonic": "888940342", "InactiveExitTimestamp": "Sat 2025-02-15 11:42:49 EST", "InactiveExitTimestampMonotonic": "889367392", "InvocationID": "c6b5e8909b3b49249932699fc945aca6", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "524288", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "56322", "ManagedOOMMemoryPressure": 
"auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3240755200", "MemoryCurrent": "5099520", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "5853184", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "2", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "systemd-logind.service dbus-org.freedesktop.login1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "yes", "PrivateTmpEx": "connected", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "yes", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "strict", "ReadWritePaths": "/etc /run", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": "/run/systemd/users /run/systemd/sessions /run/systemd/seats /var/lib/systemd/linger /run/systemd/inhibit /run/systemd/shutdown", "Restart": "always", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "0", "RestartUSecNext": "0", "RestrictAddressFamilies": "AF_NETLINK AF_UNIX", "RestrictNamespaces": "yes", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectory": "systemd/inhibit systemd/seats systemd/sessions systemd/shutdown systemd/users", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "yes", "RuntimeDirectorySymlink": "systemd/users:", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", 
"StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-02-15 11:42:49 EST", "StateChangeTimestampMonotonic": "889413658", "StateDirectory": "systemd/linger", "StateDirectoryMode": "0755", "StateDirectorySymlink": "systemd/linger:", "StatusErrno": "0", "StatusText": "Processing requests...", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "1", "SystemCallFilter": "_llseek _newselect accept accept4 access add_key alarm arch_prctl arm_fadvise64_64 bind brk cacheflush capget capset chdir chmod chown chown32 clock_getres clock_getres_time64 clock_gettime clock_gettime64 clock_nanosleep clock_nanosleep_time64 clone clone3 close close_range connect copy_file_range creat dup dup2 dup3 epoll_create epoll_create1 epoll_ctl epoll_ctl_old epoll_pwait epoll_pwait2 epoll_wait epoll_wait_old eventfd eventfd2 execve execveat exit exit_group faccessat faccessat2 fadvise64 fadvise64_64 fallocate fchdir fchmod fchmodat fchown fchown32 fchownat fcntl fcntl64 fdatasync fgetxattr flistxattr flock fork fremovexattr fsetxattr fstat fstat64 fstatat64 fstatfs fstatfs64 fsync ftruncate ftruncate64 futex futex_time64 futimesat get_mempolicy get_robust_list get_thread_area getcpu getcwd getdents getdents64 getegid getegid32 geteuid geteuid32 getgid getgid32 getgroups getgroups32 getitimer getpeername getpgid getpgrp getpid getppid getpriority getrandom getresgid getresgid32 getresuid getresuid32 getrlimit getrusage getsid getsockname getsockopt gettid gettimeofday getuid getuid32 getxattr inotify_add_watch inotify_init inotify_init1 inotify_rm_watch io_cancel io_destroy io_getevents io_pgetevents io_pgetevents_time64 io_setup io_submit io_uring_enter io_uring_register io_uring_setup ioctl ioprio_get ioprio_set ipc kcmp keyctl kill landlock_add_rule landlock_create_ruleset landlock_restrict_self lchown lchown32 lgetxattr link linkat listen listxattr llistxattr lremovexattr lseek lsetxattr lstat lstat64 madvise mbind membarrier memfd_create migrate_pages mkdir mkdirat mknod mknodat mlock mlock2 mlockall mmap mmap2 move_pages mprotect mq_getsetattr mq_notify mq_open mq_timedreceive mq_timedreceive_time64 mq_timedsend mq_timedsend_time64 mq_unlink mremap msgctl msgget msgrcv msgsnd msync munlock munlockall munmap name_to_handle_at nanosleep newfstatat nice oldfstat oldlstat oldolduname oldstat olduname open openat openat2 pause personality pidfd_open pidfd_send_signal pipe pipe2 poll ppoll ppoll_time64 prctl pread64 preadv preadv2 prlimit64 process_madvise process_vm_readv process_vm_writev pselect6 pselect6_time64 pwrite64 pwritev pwritev2 read readahead readdir readlink readlinkat readv recv recvfrom recvmmsg recvmmsg_time64 recvmsg remap_file_pages removexattr rename renameat renameat2 request_key restart_syscall riscv_flush_icache rmdir rseq rt_sigaction rt_sigpending rt_sigprocmask rt_sigqueueinfo rt_sigreturn rt_sigsuspend rt_sigtimedwait rt_sigtimedwait_time64 rt_tgsigqueueinfo sched_get_priority_max sched_get_priority_min sched_getaffinity sched_getattr sched_getparam sched_getscheduler sched_rr_get_interval sched_rr_get_interval_time64 sched_setaffinity sched_setattr sched_setparam sched_setscheduler sched_yield seccomp select semctl semget semop semtimedop 
semtimedop_time64 send sendfile sendfile64 sendmmsg sendmsg sendto set_mempolicy set_robust_list set_thread_area set_tid_address set_tls setfsgid setfsgid32 setfsuid setfsuid32 setgid setgid32 setgroups setgroups32 setitimer setns setpgid setpriority setregid setregid32 setresgid setresgid32 setresuid setresuid32 setreuid setreuid32 setrlimit setsid setsockopt setuid setuid32 setxattr shmat shmctl shmdt shmget shutdown sigaction sigaltstack signal signalfd signalfd4 sigpending sigprocmask sigreturn sigsuspend socket socketcall socketpair splice stat stat64 statfs statfs64 statx swapcontext symlink symlinkat sync sync_file_range sync_file_range2 syncfs sysinfo tee tgkill time timer_create timer_delete timer_getoverrun timer_gettime timer_gettime64 timer_settime timer_settime64 timerfd_create timerfd_gettime timerfd_gettime64 timerfd_settime timerfd_settime64 times tkill truncate truncate64 ugetrlimit umask uname unlink unlinkat unshare userfaultfd utime utimensat utimensat_time64 utimes vfork vmsplice wait4 waitid waitpid write writev", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify-reload", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "static", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "dbus.socket user.slice -.mount modprobe@drm.service", "WantsMountsFor": "/tmp /var/tmp", "WatchdogSignal": "6", "WatchdogTimestamp": "Sat 2025-02-15 11:44:20 EST", "WatchdogTimestampMonotonic": "980423059", "WatchdogUSec": "3min" } } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 15 February 2025 11:44:32 -0500 (0:00:00.582) 0:01:30.107 ***** ok: [managed-node3] => { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.083330", "end": "2025-02-15 11:44:32.546788", "failed_when_result": false, "rc": 1, "start": "2025-02-15 11:44:32.463458" } STDERR: Failed to get user: User ID 2223 is not logged in or lingering MSG: non-zero return code TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 15 February 2025 11:44:32 -0500 (0:00:00.468) 0:01:30.576 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__user_state is failed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:44:32 -0500 (0:00:00.033) 0:01:30.609 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 
February 2025 11:44:32 -0500 (0:00:00.105) 0:01:30.714 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:109 Saturday 15 February 2025 11:44:32 -0500 (0:00:00.086) 0:01:30.801 ***** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Ensure no linger] ******************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:120 Saturday 15 February 2025 11:44:32 -0500 (0:00:00.092) 0:01:30.894 ***** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:159 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.396) 0:01:31.290 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.136) 0:01:31.427 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.093) 0:01:31.520 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.065) 0:01:31.586 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.039) 0:01:31.626 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.040) 0:01:31.666 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.042) 0:01:31.709 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.038) 0:01:31.747 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:44:33 -0500 (0:00:00.083) 0:01:31.831 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:44:34 -0500 (0:00:01.022) 0:01:32.854 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:44:34 -0500 (0:00:00.040) 0:01:32.895 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:44:34 -0500 (0:00:00.048) 0:01:32.943 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update 
systems] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.037) 0:01:32.981 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.035) 0:01:33.016 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.036) 0:01:33.053 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028793", "end": "2025-02-15 11:44:35.429042", "rc": 0, "start": "2025-02-15 11:44:35.400249" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.409) 0:01:33.463 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.042) 0:01:33.506 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.034) 0:01:33.540 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.042) 0:01:33.582 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.044) 0:01:33.627 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.057) 0:01:33.685 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" } MSG: end_host conditional evaluated to false, continuing execution for managed-node3 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.080) 0:01:33.765 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.111) 0:01:33.877 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:35 -0500 (0:00:00.060) 0:01:33.938 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:36 -0500 (0:00:00.139) 0:01:34.077 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:36 -0500 (0:00:00.064) 0:01:34.142 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:36 -0500 (0:00:00.414) 0:01:34.556 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004174", "end": "2025-02-15 11:44:36.929225", "rc": 0, "start": "2025-02-15 11:44:36.925051" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:36 -0500 (0:00:00.396) 0:01:34.953 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006223", "end": "2025-02-15 11:44:37.324573", "rc": 0, "start": "2025-02-15 11:44:37.318350" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.396) 0:01:35.349 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.058) 0:01:35.408 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.049) 0:01:35.458 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.059) 0:01:35.517 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.042) 0:01:35.559 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.041) 0:01:35.601 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.043) 0:01:35.644 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.047) 0:01:35.692 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.065) 0:01:35.758 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.035) 0:01:35.793 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.036) 0:01:35.829 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:44:37 -0500 (0:00:00.085) 0:01:35.914 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.119) 0:01:36.034 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.041) 0:01:36.075 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.079) 0:01:36.155 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.035) 0:01:36.190 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.036) 0:01:36.226 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.069) 0:01:36.296 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.036) 0:01:36.332 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.039) 0:01:36.372 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.055) 0:01:36.428 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.048) 0:01:36.476 ***** 
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.038) 0:01:36.514 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.040) 0:01:36.555 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.039) 0:01:36.594 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.033) 0:01:36.628 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.032) 0:01:36.660 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.031) 0:01:36.692 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.088) 0:01:36.780 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:44:38 -0500 (0:00:00.136) 0:01:36.917 ***** ok: [managed-node3] => { 
"ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.057) 0:01:36.974 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.050) 0:01:37.025 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.040) 0:01:37.065 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.055) 0:01:37.120 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.064) 0:01:37.185 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.041) 0:01:37.226 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.038) 0:01:37.265 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : 
See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.048) 0:01:37.314 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:39 -0500 (0:00:00.389) 0:01:37.703 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004169", "end": "2025-02-15 11:44:40.065337", "rc": 0, "start": "2025-02-15 11:44:40.061168" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.386) 0:01:38.090 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.006146", "end": "2025-02-15 11:44:40.453022", "rc": 0, "start": "2025-02-15 11:44:40.446876" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.389) 0:01:38.480 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.051) 0:01:38.531 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.037) 0:01:38.569 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
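The subordinate-ID checks above have a direct manual equivalent; both commands are taken verbatim from this run, and the output format is "<index>: <user> <start> <count>":

# Rootless podman needs subuid and subgid ranges allocated to the user.
getsubids user_quadlet_pod      # subuids: "0: user_quadlet_pod 655360 65536"
getsubids -g user_quadlet_pod   # subgids: same range on this host
# When /usr/bin/getsubids is absent, the role instead reads the subuid/subgid
# files itself (the "Get subuid file" / "Get subgid file" tasks, skipped here
# because the binary exists).

TASK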
[fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.037) 0:01:38.606 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.098) 0:01:38.705 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.037) 0:01:38.743 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.036) 0:01:38.779 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.062) 0:01:38.841 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.037) 0:01:38.879 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:44:40 -0500 (0:00:00.035) 0:01:38.914 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.085) 0:01:39.000 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.042) 0:01:39.042 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.083) 0:01:39.126 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.380) 0:01:39.507 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.040) 0:01:39.548 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:44:41 -0500 (0:00:00.383) 0:01:39.931 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.070) 0:01:40.002 ***** ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.436) 0:01:40.439 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" }
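The cleanup above boils down to deleting the rendered quadlet file and refreshing the user's systemd instance; "Refresh systemd" is skipped in this run only because the file was already gone, so the removal reported no change. A manual sketch, assuming a live session for user_quadlet_pod (this run had none, which is also why "Stop and disable service" was skipped); the daemon-reload step is my assumption of what the role's refresh amounts to:

# Run as user_quadlet_pod with a valid XDG_RUNTIME_DIR.
rm -f /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container
systemctl --user daemon-reload   # re-runs the quadlet generator; the unit disappears

TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.056) 0:01:40.495 ***** skipping: [managed-node3] => { "censored": "the output has been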
hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.064) 0:01:40.559 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.077) 0:01:40.637 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.107) 0:01:40.745 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.045) 0:01:40.790 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.060) 0:01:40.851 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.037) 0:01:40.889 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:44:42 -0500 (0:00:00.036) 0:01:40.925 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.043) 0:01:40.969 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }
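Linger is what keeps a rootless user's systemd instance, and with it any quadlet units, running without an open login session; the role enables it while user units exist and records the user above (__podman_cancel_user_linger) so it can be cancelled once everything is removed. A sketch of the underlying loginctl calls (the show-user probe is the same one the role ran earlier while waiting for the session to leave the "closing" state):

loginctl enable-linger user_quadlet_pod                # keep the user manager alive
loginctl show-user --value -p State user_quadlet_pod   # "lingering", "closing", or rc=1 once the user is gone
loginctl disable-linger user_quadlet_pod               # cancel once no user units remain

TASK [fedora.linux_system_roles.podman : For testing and debugging -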
volumes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.048) 0:01:41.018 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.062) 0:01:41.081 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.064) 0:01:41.145 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.063) 0:01:41.209 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.064) 0:01:41.274 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.065) 0:01:41.339 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.065) 0:01:41.405 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.056) 0:01:41.462 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false }
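The Pod spec in "part 0" above is the other half of the pair and maps onto a one-stanza quadlet file. Again a sketch of roughly what the role would render (and is removing in this pass):

cat > /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod <<'EOF'
[Pod]
PodName=quadlet-pod
EOF

Quadlet derives quadlet-pod-pod-pod.service from this file (the __podman_service_name fact set below), which is the unit the container's Pod=quadlet-pod-pod.pod reference binds to.

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***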
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.078) 0:01:41.541 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.070) 0:01:41.611 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.057) 0:01:41.668 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:44:43 -0500 (0:00:00.176) 0:01:41.844 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.111) 0:01:41.956 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.069) 0:01:42.026 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.063) 0:01:42.090 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:44:44 -0500 (0:00:00.079) 0:01:42.169 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 
1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:45 -0500 (0:00:01.715) 0:01:43.884 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004277", "end": "2025-02-15 11:44:46.275840", "rc": 0, "start": "2025-02-15 11:44:46.271563" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:46 -0500 (0:00:00.416) 0:01:44.300 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005708", "end": "2025-02-15 11:44:46.661731", "rc": 0, "start": "2025-02-15 11:44:46.656023" } STDOUT: 0: user_quadlet_pod 655360 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:46 -0500 (0:00:00.388) 0:01:44.688 ***** ok: [managed-node3] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 655360 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:46 -0500 (0:00:00.051) 0:01:44.740 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:46 -0500 (0:00:00.038) 0:01:44.778 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:46 -0500 (0:00:00.038) 0:01:44.816 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user 
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 15 February 2025 11:44:46 -0500 (0:00:00.038) 0:01:44.740 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 15 February 2025 11:44:46 -0500 (0:00:00.038) 0:01:44.778 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 15 February 2025 11:44:46 -0500 (0:00:00.038) 0:01:44.816 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Saturday 15 February 2025 11:44:46 -0500 (0:00:00.036) 0:01:44.853 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Saturday 15 February 2025 11:44:46 -0500 (0:00:00.038) 0:01:44.892 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Saturday 15 February 2025 11:44:46 -0500 (0:00:00.036) 0:01:44.928 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.061) 0:01:44.990 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.039) 0:01:45.029 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.033) 0:01:45.063 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false }
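These facts spell out the rootless quadlet conventions: a spec named quadlet-pod-pod of type pod is written to ~/.config/containers/systemd/quadlet-pod-pod.pod (root specs go under /etc/containers/systemd instead), and podman's quadlet generator appends -pod to a .pod unit, giving quadlet-pod-pod-pod.service in the user scope with XDG_RUNTIME_DIR=/run/user/2223. The spec itself is censored by no_log in this log, so the following is only a plausible sketch of the playbook variables driving this cleanup pass; the Pod section values are assumptions:

    - name: Tear down the rootless pod quadlet (illustrative sketch)
      hosts: all
      vars:
        podman_run_as_user: user_quadlet_pod
        podman_quadlet_specs:
          - name: quadlet-pod-pod
            type: pod
            state: absent            # matches __podman_state above
            Pod:
              PodName: quadlet-pod   # assumed; the real section is hidden by no_log
      roles:
        - fedora.linux_system_roles.podman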
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.161) 0:01:45.224 *****
ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.067) 0:01:45.291 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.095) 0:01:45.387 *****
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.407) 0:01:45.794 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Saturday 15 February 2025 11:44:47 -0500 (0:00:00.063) 0:01:45.858 *****
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.391) 0:01:46.250 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.059) 0:01:46.309 *****
ok: [managed-node3] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" }

TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.417) 0:01:46.727 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.036) 0:01:46.763 *****
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.036) 0:01:46.800 *****
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.046) 0:01:46.846 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.036) 0:01:46.883 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 15 February 2025 11:44:48 -0500 (0:00:00.039) 0:01:46.922 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.085) 0:01:47.008 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.050) 0:01:47.058 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.040) 0:01:47.099 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false }
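Lingering is what lets a rootless user's systemd instance, and with it these quadlet units, keep running without an open login session; the role enables it when rootless units are created and, during cleanup, records the user in __podman_cancel_user_linger so lingering can be cancelled once no rootless resources remain. A standalone equivalent of that eventual cancel step (a sketch only; the role's cancel_linger.yml additionally checks for leftover containers, networks, and secrets first, as the tasks further below show):

    - name: Cancel lingering for the test user once nothing rootless is left
      ansible.builtin.command: loginctl disable-linger user_quadlet_pod
      changed_when: true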
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.126) 0:01:47.225 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.038) 0:01:47.263 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.039) 0:01:47.303 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.037) 0:01:47.341 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.039) 0:01:47.380 *****
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.042) 0:01:47.423 *****
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.056) 0:01:47.479 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.054) 0:01:47.534 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.037) 0:01:47.571 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node3 => (item=user_quadlet_pod)

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.107) 0:01:47.679 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set cancel linger vars] ***************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11
Saturday 15 February 2025 11:44:49 -0500 (0:00:00.040) 0:01:47.719 *****
ok: [managed-node3]
=> { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Saturday 15 February 2025 11:44:49 -0500 (0:00:00.046) 0:01:47.765 ***** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.388) 0:01:48.154 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.034) 0:01:48.189 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.035) 0:01:48.224 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.034) 0:01:48.259 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.034) 0:01:48.294 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.124) 0:01:48.418 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.038) 0:01:48.457 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.041) 0:01:48.498 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.040) 0:01:48.539 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.036) 0:01:48.576 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Remove test user] ******************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:168 Saturday 15 February 2025 11:44:50 -0500 (0:00:00.050) 0:01:48.626 ***** changed: [managed-node3] => { "changed": true, "force": false, "name": "user_quadlet_pod", "remove": false, "state": "absent" } TASK [Cleanup system - root] *************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:173 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.566) 0:01:49.193 ***** included: fedora.linux_system_roles.podman for managed-node3 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.139) 0:01:49.332 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.071) 0:01:49.404 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.048) 0:01:49.452 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.034) 0:01:49.487 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.036) 0:01:49.523 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.034) 0:01:49.558 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.036) 0:01:49.594 ***** ok: [managed-node3] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node3] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node3] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 15 February 2025 11:44:51 -0500 (0:00:00.076) 0:01:49.671 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.999) 0:01:50.670 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.036) 0:01:50.706 ***** skipping: [managed-node3] => { 
"changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.042) 0:01:50.749 ***** skipping: [managed-node3] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.035) 0:01:50.784 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.037) 0:01:50.821 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 15 February 2025 11:44:52 -0500 (0:00:00.036) 0:01:50.857 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.029655", "end": "2025-02-15 11:44:53.238615", "rc": 0, "start": "2025-02-15 11:44:53.208960" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 15 February 2025 11:44:53 -0500 (0:00:00.417) 0:01:51.275 ***** ok: [managed-node3] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 15 February 2025 11:44:53 -0500 (0:00:00.058) 0:01:51.334 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 15 February 2025 11:44:53 -0500 (0:00:00.055) 0:01:51.389 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 15 February 2025 11:44:53 -0500 (0:00:00.069) 0:01:51.459 ***** META: end_host conditional evaluated to False, continuing execution for managed-node3 skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, 
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Saturday 15 February 2025 11:44:53 -0500 (0:00:00.069) 0:01:51.459 *****
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Saturday 15 February 2025 11:44:53 -0500 (0:00:00.076) 0:01:51.535 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Saturday 15 February 2025 11:44:53 -0500 (0:00:00.098) 0:01:51.634 *****
META: end_host conditional evaluated to False, continuing execution for managed-node3
skipping: [managed-node3] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node3" }

MSG:

end_host conditional evaluated to false, continuing execution for managed-node3

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Saturday 15 February 2025 11:44:53 -0500 (0:00:00.098) 0:01:51.732 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 15 February 2025 11:44:53 -0500 (0:00:00.167) 0:01:51.900 *****
ok: [managed-node3] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 15 February 2025 11:44:54 -0500 (0:00:00.389) 0:01:52.289 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 15 February 2025 11:44:54 -0500 (0:00:00.040) 0:01:52.329 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 15 February 2025 11:44:54 -0500 (0:00:00.050) 0:01:52.380 *****
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false,
"isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.383) 0:01:52.763 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.036) 0:01:52.800 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.038) 0:01:52.838 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.036) 0:01:52.875 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:44:54 -0500 (0:00:00.041) 0:01:52.916 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.044) 0:01:52.961 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.062) 0:01:53.023 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.063) 0:01:53.086 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.060) 0:01:53.147 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.088) 0:01:53.235 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.081) 0:01:53.317 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.035) 0:01:53.353 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.038) 0:01:53.391 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.136) 0:01:53.528 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.044) 0:01:53.573 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.036) 0:01:53.609 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.070) 0:01:53.679 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.035) 0:01:53.715 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.035) 0:01:53.750 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.072) 0:01:53.823 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.041) 0:01:53.864 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 15 February 2025 11:44:55 -0500 (0:00:00.050) 0:01:53.915 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.047) 0:01:53.962 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.033) 0:01:53.996 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.034) 0:01:54.031 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.035) 0:01:54.066 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.033) 0:01:54.100 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.089) 0:01:54.189 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.035) 0:01:54.224 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.034) 0:01:54.259 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 15 February 2025 11:44:56 -0500 (0:00:00.030) 0:01:54.289 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node3 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.096) 0:01:54.386 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.046) 0:01:54.432 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.036) 0:01:54.476 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.036) 0:01:54.512 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false }
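Unlike the rootless pass earlier, this spec resolves with __podman_rootless false, so the unit lives under /etc/containers/systemd in the system scope, and a .container quadlet keeps its base name, yielding quadlet-pod-container.service (part 3, below). Reassembled from the part 0 and part 1 facts above into the role's input format, the variable behind this cleanup would look roughly like this (the test's real variables are censored by no_log; state: absent mirrors __podman_state):

    podman_quadlet_specs:
      - name: quadlet-pod-container
        type: container
        state: absent                  # this pass removes the unit
        Container:
          ContainerName: quadlet-pod-container
          Exec: /bin/busybox-extras httpd -f -p 80
          Image: quay.io/libpod/testimage:20210610
          Pod: quadlet-pod-pod.pod     # joins the pod from the companion spec
        Install:
          WantedBy: default.target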
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.051) 0:01:54.563 *****
included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.064) 0:01:54.628 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.039) 0:01:54.667 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.043) 0:01:54.711 *****
ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 15 February 2025 11:44:56 -0500 (0:00:00.049) 0:01:54.761 *****
ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 15 February 2025 11:44:57 -0500 (0:00:00.387) 0:01:55.148 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 15 February 2025 11:44:57 -0500 (0:00:00.037) 0:01:55.186 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 15 February 2025 11:44:57 -0500 (0:00:00.036) 0:01:55.222 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 15 February 2025 11:44:57 -0500 (0:00:00.104) 0:01:55.326 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 15 February 2025 11:44:57 -0500 (0:00:00.037) 0:01:55.364 *****
skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and
subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.038) 0:01:55.403 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.037) 0:01:55.441 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.036) 0:01:55.477 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.036) 0:01:55.514 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.057) 0:01:55.572 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.038) 0:01:55.611 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.034) 0:01:55.646 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.077) 0:01:55.724 ***** ok: [managed-node3] => { "censored": "the 
output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.044) 0:01:55.768 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.079) 0:01:55.847 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:44:57 -0500 (0:00:00.035) 0:01:55.883 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-02-15 11:43:22 EST", "ActiveEnterTimestampMonotonic": "922271848", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "quadlet-pod-pod-pod.service -.mount systemd-journald.socket system.slice basic.target network-online.target sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-02-15 11:43:21 EST", "AssertTimestampMonotonic": "922137352", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "101525000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-02-15 11:43:21 EST", "ConditionTimestampMonotonic": "922137349", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-pod-container.service", "ControlGroupId": "10857", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", 
"DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "60221", "ExecMainStartTimestamp": "Sat 2025-02-15 11:43:22 EST", "ExecMainStartTimestampMonotonic": "922240001", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Sat 2025-02-15 11:43:21 EST] ; stop_time=[n/a] ; pid=60211 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Sat 2025-02-15 11:43:21 EST] ; stop_time=[n/a] ; pid=60211 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-02-15 11:43:21 EST", 
"InactiveExitTimestampMonotonic": "922151155", "InvocationID": "1f550fc8f34243468c2cd5f629f676be", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "60221", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3254923264", "MemoryCurrent": "868352", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "20959232", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", 
"RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-02-15 11:43:22 EST", "StateChangeTimestampMonotonic": "922271848", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target quadlet-pod-pod-pod.service", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:45:09 -0500 (0:00:11.183) 0:02:07.066 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637802.6695132, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1739637800.5324993, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 109052103, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1739637800.2444975, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-container.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 0, "version": "1768848636", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK 
[fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.387) 0:02:07.454 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.127) 0:02:07.581 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:45:09 -0500 (0:00:00.368) 0:02:07.950 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.053) 0:02:08.004 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.034) 0:02:08.038 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.036) 0:02:08.075 ***** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:10 -0500 (0:00:00.374) 0:02:08.450 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.734) 0:02:09.184 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.427) 0:02:09.612 ***** 
skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.046) 0:02:09.658 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:45:11 -0500 (0:00:00.037) 0:02:09.696 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.057398", "end": "2025-02-15 11:45:12.103263", "rc": 0, "start": "2025-02-15 11:45:12.045865" } STDOUT: 29238fe0ea2d46765ca52e682e886bf4a885c74a7c84fe2d3c7f54495a5442b2 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.432) 0:02:10.129 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.060) 0:02:10.190 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.034) 0:02:10.225 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.033) 0:02:10.258 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:45:12 -0500 (0:00:00.034) 0:02:10.292 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.033579", "end": "2025-02-15 11:45:12.680080", "rc": 0, "start": "2025-02-15 11:45:12.646501" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 
11:45:12 -0500 (0:00:00.411) 0:02:10.703 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032101", "end": "2025-02-15 11:45:13.082741", "rc": 0, "start": "2025-02-15 11:45:13.050640" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.404) 0:02:11.107 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030954", "end": "2025-02-15 11:45:13.489786", "rc": 0, "start": "2025-02-15 11:45:13.458832" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:45:13 -0500 (0:00:00.467) 0:02:11.575 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031089", "end": "2025-02-15 11:45:13.960743", "rc": 0, "start": "2025-02-15 11:45:13.929654" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.408) 0:02:11.983 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.409) 0:02:12.393 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:45:14 -0500 (0:00:00.409) 0:02:12.803 ***** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": 
"alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "inactive", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": 
"systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": 
"systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": 
"systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:16 -0500 (0:00:02.129) 0:02:14.933 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.033) 0:02:14.966 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.046) 0:02:15.013 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.043) 0:02:15.056 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.035) 0:02:15.092 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.050) 0:02:15.142 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.063) 0:02:15.206 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.039) 0:02:15.245 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.037) 0:02:15.283 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.047) 0:02:15.330 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637482.3586853, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1739637469.6276166, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9120230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, 
"roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "657259727", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.377) 0:02:15.708 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.035) 0:02:15.744 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.096) 0:02:15.840 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.035) 0:02:15.876 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.036) 0:02:15.913 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 15 February 2025 11:45:17 -0500 (0:00:00.035) 0:02:15.948 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.036) 0:02:15.985 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.036) 0:02:16.021 ***** skipping: [managed-node3] => { "changed": 
false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.034) 0:02:16.055 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.057) 0:02:16.113 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.036) 0:02:16.150 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.034) 0:02:16.184 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.078) 0:02:16.262 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.040) 0:02:16.303 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.080) 0:02:16.384 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 15 February 2025 11:45:18 -0500 (0:00:00.033) 
0:02:16.417 ***** changed: [managed-node3] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target system.slice network-online.target systemd-journald.socket -.mount sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698225152", "EffectiveMemoryMax": "3698225152", "EffectiveTasksMax": "22347", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace 
--infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13967", "LimitNPROCSoft": "13967", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13967", "LimitSIGPENDINGSoft": "13967", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", 
"MemoryAvailable": "3249975296", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": 
"6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22347", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.803) 0:02:17.220 ***** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1739637804.0795224, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 1739637793.1824517, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 83886279, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1739637792.7554488, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 70, "uid": 0, "version": "2351653310", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.392) 0:02:17.612 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node3 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 15 February 2025 11:45:19 -0500 (0:00:00.121) 0:02:17.734 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.372) 0:02:18.107 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.052) 0:02:18.159 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the 
TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.035) 0:02:18.195 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false }
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.034) 0:02:18.229 ***** changed: [managed-node3] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "state": "absent" }
TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 15 February 2025 11:45:20 -0500 (0:00:00.382) 0:02:18.612 ***** ok: [managed-node3] => { "changed": false, "name": null, "status": {} }
TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 15 February 2025 11:45:21 -0500 (0:00:00.745) 0:02:19.358 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 15 February 2025 11:45:21 -0500 (0:00:00.441) 0:02:19.800 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:21 -0500 (0:00:00.046) 0:02:19.847 ***** ok: [managed-node3] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 15 February 2025 11:45:21 -0500 (0:00:00.034) 0:02:19.881 ***** changed: [managed-node3] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.038940", "end": "2025-02-15 11:45:22.280189", "rc": 0, "start": "2025-02-15 11:45:22.241249" }
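
All of the linger bookkeeping that follows is skipped because this play manages root (__podman_rootless is false throughout). For a rootless user the role must keep that user's systemd instance alive after logout, otherwise the quadlet units would stop with the session; a minimal sketch of the usual pattern, with variable names assumed from the skip conditions in this log:

    - name: Enable linger if needed (sketch)
      ansible.builtin.command: loginctl enable-linger {{ __podman_user }}
      args:
        creates: /var/lib/systemd/linger/{{ __podman_user }}  # marker file systemd writes
      when: __podman_rootless | bool
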
TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 15 February 2025 11:45:22 -0500 (0:00:00.427) 0:02:20.309 ***** included: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node3
TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 15 February 2025 11:45:22 -0500 (0:00:00.061) 0:02:20.370 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 15 February 2025 11:45:22 -0500 (0:00:00.034) 0:02:20.404 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 15 February 2025 11:45:22 -0500 (0:00:00.032) 0:02:20.437 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 15 February 2025 11:45:22 -0500 (0:00:00.034) 0:02:20.471 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031673", "end": "2025-02-15 11:45:22.857867", "rc": 0, "start": "2025-02-15 11:45:22.826194" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 15 February 2025 11:45:22 -0500 (0:00:00.412) 0:02:20.884 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.031021", "end": "2025-02-15 11:45:23.275607", "rc": 0, "start": "2025-02-15 11:45:23.244586" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 15 February 2025 11:45:23 -0500 (0:00:00.415) 0:02:21.300 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.032333", "end": "2025-02-15 11:45:23.690267", "rc": 0, "start": "2025-02-15 11:45:23.657934" }
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 15 February 2025 11:45:23 -0500 (0:00:00.484) 0:02:21.785 ***** ok: [managed-node3] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031777", "end": "2025-02-15 11:45:24.175055", "rc": 0, "start": "2025-02-15 11:45:24.143278" }
STDOUT: podman
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 15 February 2025 11:45:24 -0500 (0:00:00.415) 0:02:22.200 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path:
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 15 February 2025 11:45:24 -0500 (0:00:00.414) 0:02:22.615 ***** ok: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 15 February 2025 11:45:25 -0500 (0:00:00.415) 0:02:23.030 ***** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": 
{ "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": 
"static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": 
"inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { 
"name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": 
"systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": 
"running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 15 February 2025 11:45:27 -0500 (0:00:02.171) 0:02:25.201 ***** skipping: [managed-node3] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 15 February 2025 11:45:27 -0500 (0:00:00.034) 0:02:25.236 ***** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 15 February 2025 11:45:27 -0500 (0:00:00.032) 0:02:25.269 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 15 February 2025 11:45:27 -0500 (0:00:00.030) 0:02:25.300 ***** skipping: [managed-node3] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:182 Saturday 15 February 2025 11:45:27 -0500 (0:00:00.050) 0:02:25.350 ***** fatal: [managed-node3]: FAILED! => { "assertion": "__podman_test_debug_secrets.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Dump journal] ************************************************************ task path: /tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194 Saturday 15 February 2025 11:45:27 -0500 (0:00:00.044) 0:02:25.395 ***** fatal: [managed-node3]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.033962", "end": "2025-02-15 11:45:27.768105", "failed_when_result": true, "rc": 0, "start": "2025-02-15 11:45:27.734143" } STDOUT: Feb 15 11:41:37 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Feb 15 11:41:37 managed-node3 kernel: podman1: port 1(veth0) entered disabled state Feb 15 11:41:37 managed-node3 quadlet-basic-mysql[40487]: 494f201e877ad97316781a3fd303710a86fe2356eaa916bbe3a7dcceb2f40e5a Feb 15 11:41:37 managed-node3 systemd[30182]: Stopped quadlet-basic-mysql.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 142 and the job result is done. Feb 15 11:41:37 managed-node3 systemd[30182]: quadlet-basic-mysql.service: Consumed 2.909s CPU time, 600.1M memory peak. 
░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Feb 15 11:41:37 managed-node3 sudo[40470]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:37 managed-node3 python3.12[40654]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:41:38 managed-node3 python3.12[40918]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:41:39 managed-node3 sudo[41091]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ojanmukbahoqwvllvmgnapnvbojnixmr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637698.9257834-18565-251074427403549/AnsiballZ_systemd.py' Feb 15 11:41:39 managed-node3 sudo[41091]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:39 managed-node3 python3.12[41094]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:41:39 managed-node3 systemd[30182]: Reload requested from client PID 41095 ('systemctl')... Feb 15 11:41:39 managed-node3 systemd[30182]: Reloading... Feb 15 11:41:39 managed-node3 systemd[30182]: Reloading finished in 43 ms. Feb 15 11:41:39 managed-node3 sudo[41091]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:39 managed-node3 sudo[41278]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mfavwfabkkbxnfeeecyawyujzpdltotx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637699.7313292-18598-136798455060684/AnsiballZ_command.py' Feb 15 11:41:39 managed-node3 sudo[41278]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:40 managed-node3 systemd[30182]: Started podman-41282.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 143. 
Feb 15 11:41:40 managed-node3 sudo[41278]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:40 managed-node3 sudo[41461]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-anrbmumpiygeblbhoiarirtwkfoioyrt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637700.425923-18639-91764216579152/AnsiballZ_command.py' Feb 15 11:41:40 managed-node3 sudo[41461]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:40 managed-node3 python3.12[41464]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:40 managed-node3 systemd[30182]: Started podman-41465.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 147. Feb 15 11:41:41 managed-node3 sudo[41461]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:42 managed-node3 sudo[41644]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdemqnlcffpoxdwmujbupholregpvpdt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637702.1739323-18718-120579731152625/AnsiballZ_command.py' Feb 15 11:41:42 managed-node3 sudo[41644]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:42 managed-node3 python3.12[41647]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:42 managed-node3 systemd[30182]: Started podman-41648.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 151. Feb 15 11:41:42 managed-node3 sudo[41644]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:42 managed-node3 sudo[41828]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jlblrejahckiemunwigytrxdgyfkeqjm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637702.7406769-18753-157716841818078/AnsiballZ_command.py' Feb 15 11:41:42 managed-node3 sudo[41828]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:43 managed-node3 python3.12[41831]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:43 managed-node3 systemd[30182]: Started podman-41832.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 155. 
Feb 15 11:41:43 managed-node3 sudo[41828]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:43 managed-node3 sudo[42011]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-umkrwkjrmgvlurfefkvgpjfxphslcofm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637703.3073995-18776-11137384482482/AnsiballZ_command.py' Feb 15 11:41:43 managed-node3 sudo[42011]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:43 managed-node3 python3.12[42014]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:43 managed-node3 systemd[30182]: Started podman-42015.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 159. Feb 15 11:41:43 managed-node3 sudo[42011]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:44 managed-node3 sudo[42194]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qpmyzohabbebwfhahsdsgzfadhjnoibf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637703.8289785-18810-143058717930371/AnsiballZ_command.py' Feb 15 11:41:44 managed-node3 sudo[42194]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:44 managed-node3 python3.12[42197]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:44 managed-node3 systemd[30182]: Started podman-42198.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 163. Feb 15 11:41:44 managed-node3 sudo[42194]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:44 managed-node3 sudo[42379]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-izlnqyakkfnzwijwbpssguecszkzcgps ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637704.3923075-18836-113181565130026/AnsiballZ_command.py' Feb 15 11:41:44 managed-node3 sudo[42379]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:44 managed-node3 systemd[30182]: Started podman-42383.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 167. 
Feb 15 11:41:44 managed-node3 sudo[42379]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:45 managed-node3 sudo[42563]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-heuptscwarkwzxeriexheyqhfnkumakz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637704.9548993-18864-220332928170055/AnsiballZ_command.py' Feb 15 11:41:45 managed-node3 sudo[42563]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:45 managed-node3 systemd[30182]: Started podman-42567.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 171. Feb 15 11:41:45 managed-node3 sudo[42563]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:45 managed-node3 sudo[42746]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkhkneuttigetvghmzgabtydvhostbtt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637705.5042398-18895-278921686472951/AnsiballZ_service_facts.py' Feb 15 11:41:45 managed-node3 sudo[42746]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:46 managed-node3 python3.12[42749]: ansible-service_facts Invoked Feb 15 11:41:47 managed-node3 sudo[42746]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:48 managed-node3 python3.12[42990]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:41:49 managed-node3 python3.12[43123]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:49 managed-node3 python3.12[43255]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:51 managed-node3 python3.12[43387]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:41:51 managed-node3 sudo[43562]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cqntwbefkmviwlpdbkhtmceauptlbchk ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637711.4596128-19174-223846800983607/AnsiballZ_systemd.py' Feb 15 11:41:51 managed-node3 sudo[43562]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:51 managed-node3 python3.12[43565]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Feb 15 11:41:52 managed-node3 systemd[30182]: Reload requested from client PID 43568 ('systemctl')... Feb 15 11:41:52 managed-node3 systemd[30182]: Reloading... Feb 15 11:41:52 managed-node3 systemd[30182]: Reloading finished in 42 ms. 
Feb 15 11:41:52 managed-node3 systemd[30182]: Stopped quadlet-basic-unused-volume-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 175 and the job result is done. Feb 15 11:41:52 managed-node3 sudo[43562]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:52 managed-node3 python3.12[43709]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:41:53 managed-node3 python3.12[43973]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:41:54 managed-node3 sudo[44146]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vcaqwxhitjqknjlcmqxywnvpwgnqxxil ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637713.9676712-19299-11234130099191/AnsiballZ_systemd.py' Feb 15 11:41:54 managed-node3 sudo[44146]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:54 managed-node3 python3.12[44149]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:41:54 managed-node3 systemd[30182]: Reload requested from client PID 44150 ('systemctl')... Feb 15 11:41:54 managed-node3 systemd[30182]: Reloading... Feb 15 11:41:54 managed-node3 systemd[30182]: Reloading finished in 41 ms. Feb 15 11:41:54 managed-node3 sudo[44146]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:54 managed-node3 sudo[44334]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tpdkzjqpxcglvmpfzehknqccxbbtqapv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637714.6804442-19338-205181058169783/AnsiballZ_command.py' Feb 15 11:41:54 managed-node3 sudo[44334]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:55 managed-node3 systemd[30182]: Started podman-44338.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 176. 
Feb 15 11:41:55 managed-node3 sudo[44334]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:55 managed-node3 sudo[44519]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qpyzpjbhljhsapxaxeuinqjzonmehipr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637715.5365481-19383-120234736846808/AnsiballZ_command.py' Feb 15 11:41:55 managed-node3 sudo[44519]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:55 managed-node3 python3.12[44522]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:55 managed-node3 systemd[30182]: Started podman-44523.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 180. Feb 15 11:41:55 managed-node3 sudo[44519]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:56 managed-node3 sudo[44702]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-osjydbvlzpofmavufsepruzdmerczktm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637716.390834-19436-184945800944848/AnsiballZ_command.py' Feb 15 11:41:56 managed-node3 sudo[44702]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:56 managed-node3 python3.12[44705]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:56 managed-node3 systemd[30182]: Started podman-44706.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 184. Feb 15 11:41:56 managed-node3 sudo[44702]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:57 managed-node3 sudo[44887]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qfkwzrzjynxsdcksfmrnjchllwgokmct ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637716.8926985-19466-32164205135172/AnsiballZ_command.py' Feb 15 11:41:57 managed-node3 sudo[44887]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:57 managed-node3 python3.12[44890]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:57 managed-node3 systemd[30182]: Started podman-44891.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 188. 
Feb 15 11:41:57 managed-node3 sudo[44887]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:57 managed-node3 sudo[45070]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mklwdqaqzdsloxbpoeutbfeqvbjhbhww ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637717.4295435-19503-196885435035854/AnsiballZ_command.py' Feb 15 11:41:57 managed-node3 sudo[45070]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:57 managed-node3 python3.12[45073]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:57 managed-node3 systemd[30182]: Started podman-45074.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 192. Feb 15 11:41:57 managed-node3 sudo[45070]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:58 managed-node3 sudo[45253]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nsgbzfnsrdihpkhkwtbxgdsowdnhmfzh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637717.9628787-19532-210387540848515/AnsiballZ_command.py' Feb 15 11:41:58 managed-node3 sudo[45253]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:58 managed-node3 python3.12[45256]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:41:58 managed-node3 systemd[30182]: Started podman-45257.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 196. Feb 15 11:41:58 managed-node3 sudo[45253]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:58 managed-node3 sudo[45437]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-usgwgsrxohkpznzexjqpuankjzkcvekr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637718.4998047-19568-43325002241139/AnsiballZ_command.py' Feb 15 11:41:58 managed-node3 sudo[45437]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:58 managed-node3 systemd[30182]: Started podman-45441.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 200. 
Feb 15 11:41:58 managed-node3 sudo[45437]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:59 managed-node3 sudo[45620]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zpwzwcijpjozmhztjgyvtpgewnnrzwql ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637719.0455215-19605-229549073511339/AnsiballZ_command.py' Feb 15 11:41:59 managed-node3 sudo[45620]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:59 managed-node3 systemd[30182]: Started podman-45624.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 204. Feb 15 11:41:59 managed-node3 sudo[45620]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:41:59 managed-node3 sudo[45804]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lanlylipyepykehwyohszogjtulcabbv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637719.6094308-19639-17691446923970/AnsiballZ_service_facts.py' Feb 15 11:41:59 managed-node3 sudo[45804]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:41:59 managed-node3 python3.12[45807]: ansible-service_facts Invoked Feb 15 11:42:02 managed-node3 sudo[45804]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:03 managed-node3 python3.12[46048]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:04 managed-node3 python3.12[46181]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:04 managed-node3 python3.12[46313]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:06 managed-node3 python3.12[46445]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:06 managed-node3 sudo[46620]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkwkftrdairkrxwsjqvsmxzhsjwvzvbx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637726.5170827-19999-218281601398528/AnsiballZ_systemd.py' Feb 15 11:42:06 managed-node3 sudo[46620]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:07 managed-node3 python3.12[46623]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Feb 15 11:42:07 managed-node3 systemd[30182]: Reload requested from client PID 46626 ('systemctl')... Feb 15 11:42:07 managed-node3 systemd[30182]: Reloading... Feb 15 11:42:07 managed-node3 systemd[30182]: Reloading finished in 39 ms. 
Feb 15 11:42:07 managed-node3 systemd[30182]: Stopped quadlet-basic-mysql-volume.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 208 and the job result is done. Feb 15 11:42:07 managed-node3 sudo[46620]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:07 managed-node3 python3.12[46767]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:08 managed-node3 python3.12[47031]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:42:09 managed-node3 sudo[47204]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-grjrfxyythzflalcppwjstrefjklhdgf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637729.07457-20128-116838781410790/AnsiballZ_systemd.py' Feb 15 11:42:09 managed-node3 sudo[47204]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:09 managed-node3 python3.12[47207]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:42:09 managed-node3 systemd[30182]: Reload requested from client PID 47208 ('systemctl')... Feb 15 11:42:09 managed-node3 systemd[30182]: Reloading... Feb 15 11:42:09 managed-node3 systemd[30182]: Reloading finished in 39 ms. Feb 15 11:42:09 managed-node3 sudo[47204]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:10 managed-node3 sudo[47390]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-czfqexpnutmjezapmegiarvesqmilxwb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637729.7731683-20155-235843843961843/AnsiballZ_command.py' Feb 15 11:42:10 managed-node3 sudo[47390]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:10 managed-node3 systemd[30182]: Started podman-47394.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 209. 
Feb 15 11:42:10 managed-node3 sudo[47390]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:10 managed-node3 sudo[47574]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-edssowyxmbaphcmozscnctozouykzoov ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637730.5093596-20197-31249289384011/AnsiballZ_command.py' Feb 15 11:42:10 managed-node3 sudo[47574]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:10 managed-node3 python3.12[47577]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:10 managed-node3 systemd[30182]: Started podman-47578.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 213. Feb 15 11:42:10 managed-node3 sudo[47574]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:11 managed-node3 sudo[47758]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eegirvisbmedfvsaupqqipxipfwoafnh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637731.3578608-20238-46977240569369/AnsiballZ_command.py' Feb 15 11:42:11 managed-node3 sudo[47758]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:11 managed-node3 python3.12[47761]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:11 managed-node3 systemd[30182]: Started podman-47762.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 217. Feb 15 11:42:11 managed-node3 sudo[47758]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:12 managed-node3 sudo[47942]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cjioprsnmlmwmtmzetrbclbefdttjpng ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637731.8718286-20259-135256305050028/AnsiballZ_command.py' Feb 15 11:42:12 managed-node3 sudo[47942]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:12 managed-node3 python3.12[47945]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:12 managed-node3 systemd[30182]: Started podman-47946.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 221. 
Feb 15 11:42:12 managed-node3 sudo[47942]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:12 managed-node3 sudo[48126]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aciztcfckuyxewxaohxewoavxcrgtyve ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637732.3702705-20284-9786909425886/AnsiballZ_command.py' Feb 15 11:42:12 managed-node3 sudo[48126]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:12 managed-node3 python3.12[48129]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:12 managed-node3 systemd[30182]: Started podman-48130.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 225. Feb 15 11:42:12 managed-node3 sudo[48126]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:13 managed-node3 sudo[48310]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gezyoyfynxblwriyzfkiwhdaslbxvaiu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637732.9069827-20303-257370822104852/AnsiballZ_command.py' Feb 15 11:42:13 managed-node3 sudo[48310]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:13 managed-node3 python3.12[48313]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:13 managed-node3 systemd[30182]: Started podman-48314.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 229. Feb 15 11:42:13 managed-node3 sudo[48310]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:13 managed-node3 sudo[48493]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xlkqljnrepdclhrcljunccoctjgcfhqt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637733.4284756-20333-192223639165815/AnsiballZ_command.py' Feb 15 11:42:13 managed-node3 sudo[48493]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:13 managed-node3 systemd[30182]: Started podman-48497.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 233. 
Feb 15 11:42:13 managed-node3 sudo[48493]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:14 managed-node3 sudo[48676]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zaewurovlmgdxbvhjelbjckhkqvpfsyp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637733.9570491-20354-154591030378332/AnsiballZ_command.py' Feb 15 11:42:14 managed-node3 sudo[48676]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:14 managed-node3 systemd[30182]: Started podman-48680.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 237. Feb 15 11:42:14 managed-node3 sudo[48676]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:14 managed-node3 sudo[48859]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sefhyykxeeiikxbibfzvtenbpkhaijwv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637734.4769793-20375-209928839006241/AnsiballZ_service_facts.py' Feb 15 11:42:14 managed-node3 sudo[48859]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:14 managed-node3 python3.12[48862]: ansible-service_facts Invoked Feb 15 11:42:16 managed-node3 sudo[48859]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:17 managed-node3 python3.12[49103]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:17 managed-node3 python3.12[49236]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:18 managed-node3 python3.12[49368]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:19 managed-node3 python3.12[49501]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:19 managed-node3 sudo[49676]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fymfnvaztcjitkgeehuonwcngvucqvka ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637739.4161153-20559-152624434419093/AnsiballZ_systemd.py' Feb 15 11:42:19 managed-node3 sudo[49676]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:19 managed-node3 python3.12[49679]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Feb 15 11:42:19 managed-node3 systemd[30182]: Reload requested from client PID 49682 ('systemctl')... Feb 15 11:42:19 managed-node3 systemd[30182]: Reloading... Feb 15 11:42:19 managed-node3 systemd[30182]: Reloading finished in 38 ms. 
Feb 15 11:42:19 managed-node3 systemd[30182]: Stopped quadlet-basic-unused-network-network.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 241 and the job result is done. Feb 15 11:42:19 managed-node3 sudo[49676]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:20 managed-node3 python3.12[49823]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:21 managed-node3 python3.12[50089]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:42:21 managed-node3 sudo[50262]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rubcpgtkcfmbihcpkbhrivxgbrzgtjvu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637741.6415298-20642-1096831369029/AnsiballZ_systemd.py' Feb 15 11:42:21 managed-node3 sudo[50262]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:22 managed-node3 python3.12[50265]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:42:22 managed-node3 systemd[30182]: Reload requested from client PID 50266 ('systemctl')... Feb 15 11:42:22 managed-node3 systemd[30182]: Reloading... Feb 15 11:42:22 managed-node3 systemd[30182]: Reloading finished in 38 ms. Feb 15 11:42:22 managed-node3 sudo[50262]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:22 managed-node3 sudo[50448]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-enhkbycbhxplbgtzyzmopwwrhjctfpcz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637742.3353992-20669-177322417203378/AnsiballZ_command.py' Feb 15 11:42:22 managed-node3 sudo[50448]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:22 managed-node3 systemd[30182]: Started podman-50452.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 242. 
Feb 15 11:42:22 managed-node3 sudo[50448]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:23 managed-node3 sudo[50631]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ubqhhklzylquuwohdpqqffyaqzwnrqeq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637742.988875-20709-203977246722901/AnsiballZ_command.py' Feb 15 11:42:23 managed-node3 sudo[50631]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:23 managed-node3 python3.12[50634]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:23 managed-node3 systemd[30182]: Started podman-50635.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 246. Feb 15 11:42:23 managed-node3 sudo[50631]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:24 managed-node3 sudo[50814]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oknliwwyxtszymdkuavrlzzmvbrzteqn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637744.0790193-20747-224244801634927/AnsiballZ_command.py' Feb 15 11:42:24 managed-node3 sudo[50814]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:24 managed-node3 python3.12[50817]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:24 managed-node3 systemd[30182]: Started podman-50818.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 250. Feb 15 11:42:24 managed-node3 sudo[50814]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:24 managed-node3 sudo[50999]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nvmscqvxiiwauugxvkgslzexzkfgafgt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637744.715151-20782-78395352037776/AnsiballZ_command.py' Feb 15 11:42:24 managed-node3 sudo[50999]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:25 managed-node3 python3.12[51002]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:25 managed-node3 systemd[30182]: Started podman-51003.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 254. 
Feb 15 11:42:25 managed-node3 sudo[50999]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:25 managed-node3 sudo[51182]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nmzyyzmaxdniisftlxqiygchlvoadtox ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637745.3862011-20814-39643817533226/AnsiballZ_command.py' Feb 15 11:42:25 managed-node3 sudo[51182]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:25 managed-node3 python3.12[51185]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:25 managed-node3 systemd[30182]: Started podman-51186.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 258. Feb 15 11:42:25 managed-node3 sudo[51182]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:26 managed-node3 sudo[51365]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aujvzylrntrcjfdchdladocmjygbvaco ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637746.0314584-20847-276904836755191/AnsiballZ_command.py' Feb 15 11:42:26 managed-node3 sudo[51365]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:26 managed-node3 python3.12[51368]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:26 managed-node3 systemd[30182]: Started podman-51369.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 262. Feb 15 11:42:26 managed-node3 sudo[51365]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:26 managed-node3 sudo[51549]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nxichruawonfvqtzkkgumvqoyodmjgsa ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637746.6029541-20875-159960747418676/AnsiballZ_command.py' Feb 15 11:42:26 managed-node3 sudo[51549]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:27 managed-node3 systemd[30182]: Started podman-51553.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 266. 
Feb 15 11:42:27 managed-node3 sudo[51549]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:27 managed-node3 sudo[51732]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mpbzypqalwebibskiwebfzkifkorczhw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637747.2353141-20911-142934208676617/AnsiballZ_command.py' Feb 15 11:42:27 managed-node3 sudo[51732]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:27 managed-node3 systemd[30182]: Started podman-51736.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 270. Feb 15 11:42:27 managed-node3 sudo[51732]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:27 managed-node3 sudo[51917]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-chauolzyimksyuiqgwyfrbdyejkkkmiu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637747.752279-20936-20490978518772/AnsiballZ_service_facts.py' Feb 15 11:42:27 managed-node3 sudo[51917]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:28 managed-node3 python3.12[51920]: ansible-service_facts Invoked Feb 15 11:42:31 managed-node3 sudo[51917]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:32 managed-node3 python3.12[52161]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:33 managed-node3 python3.12[52294]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:33 managed-node3 python3.12[52426]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:34 managed-node3 python3.12[52558]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:35 managed-node3 sudo[52733]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ucykaxhdvmelpztlyzykncyvfupijdve ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637754.8347182-21173-1798870897153/AnsiballZ_systemd.py' Feb 15 11:42:35 managed-node3 sudo[52733]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:35 managed-node3 python3.12[52736]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Feb 15 11:42:35 managed-node3 systemd[30182]: Reload requested from client PID 52739 ('systemctl')... Feb 15 11:42:35 managed-node3 systemd[30182]: Reloading... Feb 15 11:42:35 managed-node3 systemd[30182]: Reloading finished in 38 ms. 
Feb 15 11:42:35 managed-node3 systemd[30182]: Stopped quadlet-basic-network.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 274 and the job result is done. Feb 15 11:42:35 managed-node3 sudo[52733]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:36 managed-node3 python3.12[52880]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:37 managed-node3 python3.12[53144]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:42:37 managed-node3 sudo[53317]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fsazvkuextgzlzajdzzauhbvfomjxutk ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637757.3903172-21286-252104499451058/AnsiballZ_systemd.py' Feb 15 11:42:37 managed-node3 sudo[53317]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:37 managed-node3 python3.12[53320]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:42:37 managed-node3 systemd[30182]: Reload requested from client PID 53321 ('systemctl')... Feb 15 11:42:37 managed-node3 systemd[30182]: Reloading... Feb 15 11:42:37 managed-node3 systemd[30182]: Reloading finished in 37 ms. Feb 15 11:42:37 managed-node3 sudo[53317]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:38 managed-node3 sudo[53503]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fenkgvpneyiuyvogjigqvzhloeffncca ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637758.0435748-21300-5085355803788/AnsiballZ_command.py' Feb 15 11:42:38 managed-node3 sudo[53503]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:38 managed-node3 systemd[30182]: Started podman-53507.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 275. 
Feb 15 11:42:38 managed-node3 sudo[53503]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:38 managed-node3 sudo[53687]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-codpbzjaeeshvfxoetuiccrfwtevukyu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637758.7396786-21334-204288734159447/AnsiballZ_command.py' Feb 15 11:42:38 managed-node3 sudo[53687]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:39 managed-node3 python3.12[53690]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:39 managed-node3 systemd[30182]: Started podman-53691.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 279. Feb 15 11:42:39 managed-node3 sudo[53687]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:39 managed-node3 sudo[53870]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zswlkoexslfeeffzjwfqtdtnkngsrzjd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637759.667834-21384-237463365656687/AnsiballZ_command.py' Feb 15 11:42:39 managed-node3 sudo[53870]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:40 managed-node3 python3.12[53873]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:40 managed-node3 systemd[30182]: Started podman-53874.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 283. Feb 15 11:42:40 managed-node3 sudo[53870]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:40 managed-node3 sudo[54054]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ecllnvuzlrqxaiqmrhpvpdgdrqjutghx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637760.2759964-21411-229585475569065/AnsiballZ_command.py' Feb 15 11:42:40 managed-node3 sudo[54054]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:40 managed-node3 python3.12[54057]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:40 managed-node3 systemd[30182]: Started podman-54058.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 287. 
Feb 15 11:42:40 managed-node3 sudo[54054]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:41 managed-node3 sudo[54238]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tpfkpvmhzskantpvyopoijbzjirunfyr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637760.9611814-21440-68189986057030/AnsiballZ_command.py' Feb 15 11:42:41 managed-node3 sudo[54238]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:41 managed-node3 python3.12[54241]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:41 managed-node3 systemd[30182]: Started podman-54242.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 291. Feb 15 11:42:41 managed-node3 sudo[54238]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:41 managed-node3 sudo[54421]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fxrxvfbzdoaqqnnreqzgkjwjeatovuoo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637761.5488966-21478-207908113977537/AnsiballZ_command.py' Feb 15 11:42:41 managed-node3 sudo[54421]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:41 managed-node3 python3.12[54424]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:41 managed-node3 systemd[30182]: Started podman-54425.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 295. Feb 15 11:42:41 managed-node3 sudo[54421]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:42 managed-node3 sudo[54604]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rycozknqxcbpwscnqwbbatwuajrecarc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637762.1061885-21501-21476746100506/AnsiballZ_command.py' Feb 15 11:42:42 managed-node3 sudo[54604]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:42 managed-node3 systemd[30182]: Started podman-54608.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 299. 
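This stretch is the post-test sweep: podman image prune --all -f removes every image, then podman images -n, podman volume ls -n, podman ps --noheading, and podman network ls -n -q are run so the test can assert nothing was left behind. A sketch of one such check, with a hypothetical register name:

- name: Check for leftover containers (sketch)
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  ansible.builtin.command:
    cmd: podman ps --noheading
  register: __leftover_ps   # hypothetical variable name
  changed_when: false

- name: Assert the cleanup left no containers (sketch)
  ansible.builtin.assert:
    that:
      - __leftover_ps.stdout == ""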
Feb 15 11:42:42 managed-node3 sudo[54604]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:42 managed-node3 sudo[54788]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wvqndkzsadravdfcijyeqkdfokukcpur ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637762.6335125-21529-105391825015357/AnsiballZ_command.py' Feb 15 11:42:42 managed-node3 sudo[54788]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:43 managed-node3 systemd[30182]: Started podman-54793.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 303. Feb 15 11:42:43 managed-node3 sudo[54788]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:43 managed-node3 sudo[54973]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhtaixgzsvkbfmjufhihborvtmimejjn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637763.1726034-21546-1384560073322/AnsiballZ_service_facts.py' Feb 15 11:42:43 managed-node3 sudo[54973]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:43 managed-node3 python3.12[54977]: ansible-service_facts Invoked Feb 15 11:42:45 managed-node3 sudo[54973]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:45 managed-node3 python3.12[55219]: ansible-stat Invoked with path=/run/user/1111 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:46 managed-node3 sudo[55394]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-abthivnienljdmoitttbtasdrqewlefn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637765.79473-21596-152127539669812/AnsiballZ_podman_container_info.py' Feb 15 11:42:46 managed-node3 sudo[55394]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:46 managed-node3 python3.12[55397]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Feb 15 11:42:46 managed-node3 systemd[30182]: Started podman-55398.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 307. 
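Alongside the raw CLI checks, containers.podman.podman_container_info is invoked with no name, which returns inspect data for every container the user owns. A short sketch of that module (the register name is illustrative):

- name: Gather info on all remaining containers (sketch)
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  containers.podman.podman_container_info:
  register: __cinfo   # hypothetical name

- name: Report the count (sketch)
  ansible.builtin.debug:
    msg: "{{ __cinfo.containers | length }} container(s) still present"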
Feb 15 11:42:46 managed-node3 sudo[55394]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:46 managed-node3 sudo[55578]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rjvqgybpjlgensxofmehxsnuqjagtkep ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637766.4039638-21610-163889232813428/AnsiballZ_command.py' Feb 15 11:42:46 managed-node3 sudo[55578]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:46 managed-node3 python3.12[55581]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:46 managed-node3 systemd[30182]: Started podman-55582.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 311. Feb 15 11:42:46 managed-node3 sudo[55578]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:47 managed-node3 sudo[55761]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kxtkzdgrsezojzkcpiggdwnwsxwygldz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637766.8897095-21627-8577246787294/AnsiballZ_command.py' Feb 15 11:42:47 managed-node3 sudo[55761]: pam_unix(sudo:session): session opened for user user_quadlet_basic(uid=1111) by root(uid=0) Feb 15 11:42:47 managed-node3 python3.12[55764]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:47 managed-node3 systemd[30182]: Started podman-55765.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 315. Feb 15 11:42:47 managed-node3 sudo[55761]: pam_unix(sudo:session): session closed for user user_quadlet_basic Feb 15 11:42:47 managed-node3 python3.12[55903]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl disable-linger user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Feb 15 11:42:47 managed-node3 systemd[30182]: Activating special unit exit.target... Feb 15 11:42:47 managed-node3 systemd[1]: Stopping user@1111.service - User Manager for UID 1111... ░░ Subject: A stop job for unit user@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@1111.service has begun execution. ░░ ░░ The job identifier is 1813. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopping podman-pause-4dbdf3f8.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 336. 
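Note the removes=/var/lib/systemd/linger/user_quadlet_basic guard on the disable-linger command: the task is skipped once the linger flag file is gone, which keeps the teardown idempotent. As soon as linger is cancelled, systemd begins stopping user@1111.service, which is what the entries that follow record. The equivalent task, as a sketch:

- name: Cancel linger for the rootless user (sketch)
  ansible.builtin.command:
    cmd: loginctl disable-linger user_quadlet_basic
    # Only runs while the linger flag file still exists
    removes: /var/lib/systemd/linger/user_quadlet_basic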
Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 334 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 328 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 333 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 339 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 337 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 338 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 330 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 325 and the job result is done. Feb 15 11:42:47 managed-node3 dbus-broker[30527]: Dispatched 4395 messages @ 2(±11)μs / message. ░░ Subject: Dispatched 4395 messages ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ This message is printed by dbus-broker when shutting down. It includes metric ░░ information collected during the runtime of dbus-broker. ░░ ░░ The message lists the number of dispatched messages ░░ (in this case 4395) as well as the mean time to ░░ handling a single message. The time measurements exclude the time spent on ░░ writing to and reading from the kernel. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopping dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. 
░░ ░░ The job identifier is 324. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 326 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped podman-pause-4dbdf3f8.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 336 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Stopped dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 324 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Removed slice session.slice - User Core Session Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 340 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Removed slice user.slice - Slice /user. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 335 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: user.slice: Consumed 8.869s CPU time, 469.6M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Feb 15 11:42:47 managed-node3 systemd[30182]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 323 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 327 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[30182]: app.slice: Consumed 3.082s CPU time, 600.8M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Feb 15 11:42:47 managed-node3 systemd[30182]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 322. Feb 15 11:42:47 managed-node3 systemd[30182]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 320. 
Feb 15 11:42:47 managed-node3 systemd[30182]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 319. Feb 15 11:42:47 managed-node3 systemd[1]: user@1111.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@1111.service has successfully entered the 'dead' state. Feb 15 11:42:47 managed-node3 systemd[1]: Stopped user@1111.service - User Manager for UID 1111. ░░ Subject: A stop job for unit user@1111.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@1111.service has finished. ░░ ░░ The job identifier is 1813 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[1]: user@1111.service: Consumed 13.221s CPU time, 922.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@1111.service completed and consumed the indicated resources. Feb 15 11:42:47 managed-node3 systemd[1]: Stopping user-runtime-dir@1111.service - User Runtime Directory /run/user/1111... ░░ Subject: A stop job for unit user-runtime-dir@1111.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@1111.service has begun execution. ░░ ░░ The job identifier is 1812. Feb 15 11:42:47 managed-node3 systemd[1]: run-user-1111.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-1111.mount has successfully entered the 'dead' state. Feb 15 11:42:47 managed-node3 systemd[1]: user-runtime-dir@1111.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@1111.service has successfully entered the 'dead' state. Feb 15 11:42:47 managed-node3 systemd[1]: Stopped user-runtime-dir@1111.service - User Runtime Directory /run/user/1111. ░░ Subject: A stop job for unit user-runtime-dir@1111.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@1111.service has finished. ░░ ░░ The job identifier is 1812 and the job result is done. Feb 15 11:42:47 managed-node3 systemd-logind[662]: Removed session 7. ░░ Subject: Session 7 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 7 has been terminated. Feb 15 11:42:47 managed-node3 systemd[1]: Removed slice user-1111.slice - User Slice of UID 1111. ░░ Subject: A stop job for unit user-1111.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-1111.slice has finished. ░░ ░░ The job identifier is 1814 and the job result is done. Feb 15 11:42:47 managed-node3 systemd[1]: user-1111.slice: Consumed 13.249s CPU time, 922.8M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-1111.slice completed and consumed the indicated resources. 
Feb 15 11:42:48 managed-node3 python3.12[56040]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:48 managed-node3 python3.12[56172]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Feb 15 11:42:48 managed-node3 systemd[1]: Stopping systemd-logind.service - User Login Management... ░░ Subject: A stop job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 1816. Feb 15 11:42:48 managed-node3 systemd[1]: systemd-logind.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-logind.service has successfully entered the 'dead' state. Feb 15 11:42:48 managed-node3 systemd[1]: Stopped systemd-logind.service - User Login Management. ░░ Subject: A stop job for unit systemd-logind.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has finished. ░░ ░░ The job identifier is 1816 and the job result is done. Feb 15 11:42:49 managed-node3 python3.12[56318]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:49 managed-node3 systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm... ░░ Subject: A start job for unit modprobe@drm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has begun execution. ░░ ░░ The job identifier is 1896. Feb 15 11:42:49 managed-node3 systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Feb 15 11:42:49 managed-node3 systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 1896. Feb 15 11:42:49 managed-node3 systemd[1]: Starting systemd-logind.service - User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 1817. Feb 15 11:42:49 managed-node3 systemd-logind[56322]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. 
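loginctl show-user --value -p State is queried before and after systemd-logind is restarted, so the test can wait for the user's session to fully drain. A hypothetical polling task; the "active" comparison value and the retry budget below are assumptions, not taken from the log:

- name: Wait until user_quadlet_basic's session is gone (sketch)
  ansible.builtin.command:
    cmd: loginctl show-user --value -p State user_quadlet_basic
  register: __user_state   # hypothetical name
  changed_when: false
  failed_when: false       # show-user errors out once the user record is gone
  until: __user_state.stdout != "active"   # assumed target condition
  retries: 12
  delay: 5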
Feb 15 11:42:49 managed-node3 systemd-logind[56322]: Watching system buttons on /dev/input/event0 (Power Button) Feb 15 11:42:49 managed-node3 systemd-logind[56322]: Watching system buttons on /dev/input/event1 (Sleep Button) Feb 15 11:42:49 managed-node3 systemd-logind[56322]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Feb 15 11:42:49 managed-node3 systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 1817. Feb 15 11:42:49 managed-node3 python3.12[56459]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:51 managed-node3 python3.12[56721]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:42:52 managed-node3 python3.12[56858]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Feb 15 11:42:53 managed-node3 python3.12[56990]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:42:57 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:42:57 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:42:58 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:42:59 managed-node3 systemd[1]: Stopping session-3.scope - Session 3 of User root... ░░ Subject: A stop job for unit session-3.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-3.scope has begun execution. ░░ ░░ The job identifier is 1980. Feb 15 11:42:59 managed-node3 sshd-session[4424]: error: mm_reap: preauth child terminated by signal 15 Feb 15 11:42:59 managed-node3 systemd[1]: Stopping session-6.scope - Session 6 of User root... ░░ Subject: A stop job for unit session-6.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-6.scope has begun execution. ░░ ░░ The job identifier is 1981. 
Feb 15 11:42:59 managed-node3 sshd-session[6620]: error: mm_reap: preauth child terminated by signal 15 Feb 15 11:42:59 managed-node3 sshd-session[4424]: pam_systemd(sshd:session): Failed to release session: No session '3' known Feb 15 11:42:59 managed-node3 sshd-session[6620]: pam_systemd(sshd:session): Failed to release session: No session '6' known Feb 15 11:42:59 managed-node3 sshd-session[4424]: pam_unix(sshd:session): session closed for user root Feb 15 11:42:59 managed-node3 sshd-session[6620]: pam_unix(sshd:session): session closed for user root Feb 15 11:42:59 managed-node3 systemd[1]: session-3.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope has successfully entered the 'dead' state. Feb 15 11:42:59 managed-node3 systemd[1]: Stopped session-3.scope - Session 3 of User root. ░░ Subject: A stop job for unit session-3.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-3.scope has finished. ░░ ░░ The job identifier is 1980 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[1]: session-3.scope: Consumed 3.374s CPU time, 86.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-3.scope completed and consumed the indicated resources. Feb 15 11:42:59 managed-node3 systemd[1]: session-6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-6.scope has successfully entered the 'dead' state. Feb 15 11:42:59 managed-node3 systemd[1]: Stopped session-6.scope - Session 6 of User root. ░░ Subject: A stop job for unit session-6.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-6.scope has finished. ░░ ░░ The job identifier is 1981 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[1]: session-6.scope: Consumed 3min 48.564s CPU time, 438.5M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-6.scope completed and consumed the indicated resources. Feb 15 11:42:59 managed-node3 systemd[1]: Stopping user@0.service - User Manager for UID 0... ░░ Subject: A stop job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1979. Feb 15 11:42:59 managed-node3 systemd[4432]: Activating special unit exit.target... Feb 15 11:42:59 managed-node3 systemd[4432]: Removed slice background.slice - User Background Tasks Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 23 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 26 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Stopped target basic.target - Basic System. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 25 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 31 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 33 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 34 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 35 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 29 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 22 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 28 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[4432]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Feb 15 11:42:59 managed-node3 systemd[4432]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Feb 15 11:42:59 managed-node3 systemd[4432]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 17. Feb 15 11:42:59 managed-node3 systemd[1]: user@0.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@0.service has successfully entered the 'dead' state. Feb 15 11:42:59 managed-node3 systemd[1]: Stopped user@0.service - User Manager for UID 0. ░░ Subject: A stop job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has finished. ░░ ░░ The job identifier is 1979 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[1]: Stopping user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 1978. Feb 15 11:42:59 managed-node3 systemd[1]: run-user-0.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-0.mount has successfully entered the 'dead' state. Feb 15 11:42:59 managed-node3 systemd[1]: user-runtime-dir@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state. Feb 15 11:42:59 managed-node3 systemd[1]: Stopped user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A stop job for unit user-runtime-dir@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has finished. ░░ ░░ The job identifier is 1978 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[1]: Removed slice user-0.slice - User Slice of UID 0. ░░ Subject: A stop job for unit user-0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-0.slice has finished. ░░ ░░ The job identifier is 1982 and the job result is done. Feb 15 11:42:59 managed-node3 systemd[1]: user-0.slice: Consumed 3min 52.257s CPU time, 504.1M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-0.slice completed and consumed the indicated resources. Feb 15 11:42:59 managed-node3 sshd-session[57262]: Accepted publickey for root from 10.31.42.96 port 54592 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Feb 15 11:42:59 managed-node3 systemd-logind[56322]: New session 8 of user root. ░░ Subject: A new session 8 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 8 has been created for the user root. ░░ ░░ The leading process of the session is 57262. Feb 15 11:42:59 managed-node3 systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 2062. Feb 15 11:42:59 managed-node3 systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... 
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 1984. Feb 15 11:42:59 managed-node3 systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 1984. Feb 15 11:42:59 managed-node3 systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 2064. Feb 15 11:42:59 managed-node3 systemd-logind[56322]: New session 9 of user root. ░░ Subject: A new session 9 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 9 has been created for the user root. ░░ ░░ The leading process of the session is 57270. Feb 15 11:42:59 managed-node3 (systemd)[57270]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Feb 15 11:42:59 managed-node3 systemd[57270]: Queued start job for default target default.target. Feb 15 11:42:59 managed-node3 systemd[57270]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Feb 15 11:42:59 managed-node3 systemd[57270]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Feb 15 11:42:59 managed-node3 systemd[57270]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Feb 15 11:42:59 managed-node3 systemd[57270]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Feb 15 11:42:59 managed-node3 systemd[57270]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Feb 15 11:42:59 managed-node3 systemd[57270]: Starting dbus.socket - D-Bus User Message Bus Socket... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Feb 15 11:42:59 managed-node3 systemd[57270]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 8. Feb 15 11:42:59 managed-node3 systemd[57270]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Feb 15 11:42:59 managed-node3 systemd[57270]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Feb 15 11:42:59 managed-node3 systemd[57270]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Feb 15 11:42:59 managed-node3 systemd[57270]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Feb 15 11:42:59 managed-node3 systemd[57270]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Feb 15 11:42:59 managed-node3 systemd[57270]: Startup finished in 109ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 109243 microseconds. Feb 15 11:42:59 managed-node3 systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 2064. Feb 15 11:42:59 managed-node3 systemd[1]: Started session-8.scope - Session 8 of User root. ░░ Subject: A start job for unit session-8.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-8.scope has finished successfully. ░░ ░░ The job identifier is 2145. Feb 15 11:42:59 managed-node3 sshd-session[57262]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Feb 15 11:43:02 managed-node3 systemd[1]: Starting systemd-tmpfiles-clean.service - Cleanup of Temporary Directories... 
░░ Subject: A start job for unit systemd-tmpfiles-clean.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.service has begun execution. ░░ ░░ The job identifier is 2227. Feb 15 11:43:02 managed-node3 systemd[1]: systemd-tmpfiles-clean.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-clean.service has successfully entered the 'dead' state. Feb 15 11:43:02 managed-node3 systemd[1]: Finished systemd-tmpfiles-clean.service - Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.service has finished successfully. ░░ ░░ The job identifier is 2227. Feb 15 11:43:03 managed-node3 python3.12[57462]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Feb 15 11:43:04 managed-node3 python3.12[57623]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:43:04 managed-node3 python3.12[57754]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:43:06 managed-node3 python3.12[58016]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:43:07 managed-node3 python3.12[58153]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Feb 15 11:43:07 managed-node3 python3.12[58285]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:43:10 managed-node3 python3.12[58418]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:43:12 managed-node3 python3.12[58551]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:43:12 managed-node3 python3.12[58682]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Feb 15 11:43:13 managed-node3 python3.12[58787]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739637792.3790746-22752-149265129293098/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:43:13 
managed-node3 python3.12[58918]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:43:13 managed-node3 systemd[1]: Reload requested from client PID 58919 ('systemctl') (unit session-8.scope)... Feb 15 11:43:13 managed-node3 systemd[1]: Reloading... Feb 15 11:43:14 managed-node3 systemd-rc-local-generator[58960]: /etc/rc.d/rc.local is not marked executable, skipping. Feb 15 11:43:14 managed-node3 systemd[1]: Reloading finished in 205 ms. Feb 15 11:43:14 managed-node3 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2234. Feb 15 11:43:14 managed-node3 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Feb 15 11:43:14 managed-node3 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2234. Feb 15 11:43:14 managed-node3 python3.12[59106]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Feb 15 11:43:14 managed-node3 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2312. Feb 15 11:43:14 managed-node3 systemd[1]: var-lib-containers-storage-overlay-compat3327028360-lower\x2dmapped.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat3327028360-lower\x2dmapped.mount has successfully entered the 'dead' state. Feb 15 11:43:15 managed-node3 podman[59110]: 2025-02-15 11:43:15.138475834 -0500 EST m=+0.384976604 image build 29238fe0ea2d46765ca52e682e886bf4a885c74a7c84fe2d3c7f54495a5442b2 Feb 15 11:43:15 managed-node3 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 2397. Feb 15 11:43:15 managed-node3 systemd[1]: Created slice machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice - cgroup machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice. ░░ Subject: A start job for unit machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice has finished successfully. 
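Here the root-scope flow starts: quadlet-pod-pod.pod is installed under /etc/containers/systemd, a system daemon-reload runs the quadlet generator (a NAME.pod file yields NAME-pod.service, hence quadlet-pod-pod-pod.service), and the generated unit is started. The file's contents are not logged; the journal's "pod create ... name=quadlet-pod" implies PodName is set explicitly, since quadlet's default pod name would be systemd-quadlet-pod-pod. A sketch consistent with that:

- name: Install the pod quadlet (sketch; real file body is not shown in the log)
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-pod-pod.pod
    owner: root
    group: "0"
    mode: "0644"
    content: |
      [Pod]
      # Assumed: the journal records "name=quadlet-pod", not the
      # default systemd-%N, so PodName must be set in the unit.
      PodName=quadlet-pod

- name: Let quadlet generate quadlet-pod-pod-pod.service (system scope)
  ansible.builtin.systemd:
    daemon_reload: true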
░░ ░░ The job identifier is 2396. Feb 15 11:43:15 managed-node3 podman[59110]: 2025-02-15 11:43:15.182255965 -0500 EST m=+0.428756700 container create f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Feb 15 11:43:15 managed-node3 podman[59110]: 2025-02-15 11:43:15.186513793 -0500 EST m=+0.433014509 pod create 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 (image=, name=quadlet-pod) Feb 15 11:43:15 managed-node3 quadlet-pod-pod-pod[59110]: 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 Feb 15 11:43:15 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Feb 15 11:43:15 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Feb 15 11:43:15 managed-node3 kernel: veth0: entered allmulticast mode Feb 15 11:43:15 managed-node3 kernel: veth0: entered promiscuous mode Feb 15 11:43:15 managed-node3 kernel: podman0: port 1(veth0) entered blocking state Feb 15 11:43:15 managed-node3 kernel: podman0: port 1(veth0) entered forwarding state Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2484] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2496] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Feb 15 11:43:15 managed-node3 (udev-worker)[59174]: Network interface NamePolicy= disabled on kernel command line. Feb 15 11:43:15 managed-node3 (udev-worker)[59173]: Network interface NamePolicy= disabled on kernel command line. Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2562] device (veth0): carrier: link connected Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2567] device (podman0): carrier: link connected Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2772] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2777] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2783] device (podman0): Activation: starting connection 'podman0' (1b33e365-c082-4244-b20e-f89ee955f7c7) Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2785] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2787] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2788] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.2820] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Feb 15 11:43:15 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. 
░░ ░░ The job identifier is 2402. Feb 15 11:43:15 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2402. Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.3152] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.3156] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Feb 15 11:43:15 managed-node3 NetworkManager[728]: [1739637795.3170] device (podman0): Activation: successful, device activated. Feb 15 11:43:15 managed-node3 systemd[1]: Started libpod-f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48.scope - libcrun container. ░░ Subject: A start job for unit libpod-f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48.scope has finished successfully. ░░ ░░ The job identifier is 2481. Feb 15 11:43:15 managed-node3 podman[59162]: 2025-02-15 11:43:15.361789525 -0500 EST m=+0.151068664 container init f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Feb 15 11:43:15 managed-node3 podman[59162]: 2025-02-15 11:43:15.364519368 -0500 EST m=+0.153798743 container start f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Feb 15 11:43:15 managed-node3 podman[59162]: 2025-02-15 11:43:15.368815653 -0500 EST m=+0.158094701 pod start 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 (image=, name=quadlet-pod) Feb 15 11:43:15 managed-node3 quadlet-pod-pod-pod[59162]: quadlet-pod Feb 15 11:43:15 managed-node3 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2312. 
Feb 15 11:43:16 managed-node3 python3.12[59349]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:19 managed-node3 podman[59490]: 2025-02-15 11:43:19.176011997 -0500 EST m=+1.078503057 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610
Feb 15 11:43:19 managed-node3 python3.12[59655]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:43:20 managed-node3 python3.12[59786]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 15 11:43:20 managed-node3 python3.12[59891]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739637799.9090035-22970-70971864428544/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:43:21 managed-node3 python3.12[60022]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 15 11:43:21 managed-node3 systemd[1]: Reload requested from client PID 60023 ('systemctl') (unit session-8.scope)...
Feb 15 11:43:21 managed-node3 systemd[1]: Reloading...
Feb 15 11:43:21 managed-node3 systemd-rc-local-generator[60067]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 15 11:43:21 managed-node3 systemd[1]: Reloading finished in 221 ms.
Feb 15 11:43:21 managed-node3 python3.12[60207]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Feb 15 11:43:21 managed-node3 systemd[1]: Starting quadlet-pod-container.service...
░░ Subject: A start job for unit quadlet-pod-container.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-pod-container.service has begun execution.
░░
░░ The job identifier is 2488.
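The copy task above installs /etc/containers/systemd/quadlet-pod-container.container, but its payload is hidden (content=NOT_LOGGING_PARAMETER). From the image pulled at 11:43:19 and the names in the entries that follow, a plausible minimal sketch of that file (assumed, not the role's actual rendering) is:

    cat > /etc/containers/systemd/quadlet-pod-container.container <<'EOF'
    [Container]
    ContainerName=quadlet-pod-container
    Image=quay.io/libpod/testimage:20210610
    # Pod= names the .pod unit file, which puts this container into quadlet-pod
    Pod=quadlet-pod-pod.pod
    EOF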
Feb 15 11:43:21 managed-node3 podman[60211]: 2025-02-15 11:43:21.994361908 -0500 EST m=+0.048786952 container create 415ed1f9fb5de83ac3a901092719b57637979183127816e58ec461b78409710d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z)
Feb 15 11:43:22 managed-node3 podman[60211]: 2025-02-15 11:43:22.047390815 -0500 EST m=+0.101815921 container init 415ed1f9fb5de83ac3a901092719b57637979183127816e58ec461b78409710d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Feb 15 11:43:22 managed-node3 systemd[1]: Started quadlet-pod-container.service.
░░ Subject: A start job for unit quadlet-pod-container.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit quadlet-pod-container.service has finished successfully.
░░
░░ The job identifier is 2488.
Feb 15 11:43:22 managed-node3 podman[60211]: 2025-02-15 11:43:22.05177904 -0500 EST m=+0.106204249 container start 415ed1f9fb5de83ac3a901092719b57637979183127816e58ec461b78409710d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Feb 15 11:43:22 managed-node3 quadlet-pod-container[60211]: 415ed1f9fb5de83ac3a901092719b57637979183127816e58ec461b78409710d
Feb 15 11:43:22 managed-node3 podman[60211]: 2025-02-15 11:43:21.970794622 -0500 EST m=+0.025219746 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610
Feb 15 11:43:22 managed-node3 python3.12[60356]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:24 managed-node3 python3.12[60488]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:24 managed-node3 python3.12[60620]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:25 managed-node3 python3.12[60760]: ansible-user Invoked with name=user_quadlet_pod uid=2223 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Feb 15 11:43:25 managed-node3 useradd[60763]: new group: name=user_quadlet_pod, GID=2223
Feb 15 11:43:25 managed-node3 useradd[60763]: new user: name=user_quadlet_pod, UID=2223, GID=2223, home=/home/user_quadlet_pod, shell=/bin/bash, from=/dev/pts/0
Feb 15 11:43:25 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Feb 15 11:43:27 managed-node3 python3.12[61026]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:28 managed-node3 python3.12[61163]: ansible-getent Invoked with database=passwd key=user_quadlet_pod fail_key=False service=None split=None
Feb 15 11:43:29 managed-node3 python3.12[61295]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:29 managed-node3 python3.12[61428]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:30 managed-node3 python3.12[61560]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:33 managed-node3 python3.12[61692]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:33 managed-node3 python3.12[61825]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:33 managed-node3 python3.12[61957]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:35 managed-node3 python3.12[62089]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None
Feb 15 11:43:35 managed-node3 systemd[1]: Created slice user-2223.slice - User Slice of UID 2223.
░░ Subject: A start job for unit user-2223.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-2223.slice has finished successfully.
░░
░░ The job identifier is 2651.
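Two things happen in this block: the role verifies the new user has subordinate ID ranges (a prerequisite for rootless podman), and it enables lingering so the user's systemd instance survives without a login session. The shell equivalents, with the flag file the loginctl task uses as its creates= idempotency guard:

    getsubids user_quadlet_pod                   # subordinate UID range from /etc/subuid
    getsubids -g user_quadlet_pod                # subordinate GID range from /etc/subgid
    loginctl enable-linger user_quadlet_pod
    ls /var/lib/systemd/linger/user_quadlet_pod  # flag file checked by creates=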
Feb 15 11:43:35 managed-node3 systemd[1]: Starting user-runtime-dir@2223.service - User Runtime Directory /run/user/2223...
░░ Subject: A start job for unit user-runtime-dir@2223.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@2223.service has begun execution.
░░
░░ The job identifier is 2573.
Feb 15 11:43:35 managed-node3 systemd[1]: Finished user-runtime-dir@2223.service - User Runtime Directory /run/user/2223.
░░ Subject: A start job for unit user-runtime-dir@2223.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@2223.service has finished successfully.
░░
░░ The job identifier is 2573.
Feb 15 11:43:35 managed-node3 systemd[1]: Starting user@2223.service - User Manager for UID 2223...
░░ Subject: A start job for unit user@2223.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@2223.service has begun execution.
░░
░░ The job identifier is 2653.
Feb 15 11:43:35 managed-node3 systemd-logind[56322]: New session 10 of user user_quadlet_pod.
░░ Subject: A new session 10 has been created for user user_quadlet_pod
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 10 has been created for the user user_quadlet_pod.
░░
░░ The leading process of the session is 62093.
Feb 15 11:43:35 managed-node3 (systemd)[62093]: pam_unix(systemd-user:session): session opened for user user_quadlet_pod(uid=2223) by user_quadlet_pod(uid=0)
Feb 15 11:43:35 managed-node3 systemd[62093]: Queued start job for default target default.target.
Feb 15 11:43:35 managed-node3 systemd[62093]: Created slice app.slice - User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 7.
Feb 15 11:43:35 managed-node3 systemd[62093]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 11.
Feb 15 11:43:35 managed-node3 systemd[62093]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 12.
Feb 15 11:43:35 managed-node3 systemd[62093]: Reached target paths.target - Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 3.
Feb 15 11:43:35 managed-node3 systemd[62093]: Reached target timers.target - Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 10.
Feb 15 11:43:35 managed-node3 systemd[62093]: Starting dbus.socket - D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 6.
Feb 15 11:43:35 managed-node3 systemd[62093]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 9.
Feb 15 11:43:35 managed-node3 systemd[62093]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 9.
Feb 15 11:43:35 managed-node3 systemd[62093]: Listening on dbus.socket - D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 6.
Feb 15 11:43:35 managed-node3 systemd[62093]: Reached target sockets.target - Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 5.
Feb 15 11:43:35 managed-node3 systemd[62093]: Reached target basic.target - Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2.
Feb 15 11:43:35 managed-node3 systemd[62093]: Reached target default.target - Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1.
Feb 15 11:43:35 managed-node3 systemd[62093]: Startup finished in 71ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The user manager instance for user 2223 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░
░░ Startup of the manager took 71577 microseconds.
Feb 15 11:43:35 managed-node3 systemd[1]: Started user@2223.service - User Manager for UID 2223.
░░ Subject: A start job for unit user@2223.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@2223.service has finished successfully.
░░
░░ The job identifier is 2653.
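With the user manager for UID 2223 running, the play repeats the whole scenario rootlessly. For a user, quadlet looks for unit files under the home directory rather than /etc, which is exactly the directory the next task creates; a shell equivalent of that ansible-file task:

    # rootless quadlet search path (mirrors the ansible-file task below)
    install -d -m 0755 -o user_quadlet_pod -g user_quadlet_pod \
        /home/user_quadlet_pod/.config/containers/systemd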
Feb 15 11:43:35 managed-node3 python3.12[62239]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:43:36 managed-node3 python3.12[62370]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 15 11:43:36 managed-node3 python3.12[62475]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739637816.2245555-23775-168630216824945/.source.pod dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:43:37 managed-node3 sudo[62648]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-esexohwycctglnkeyoxrcxvcanbsrudi ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637817.081443-23810-142031500391907/AnsiballZ_systemd.py'
Feb 15 11:43:37 managed-node3 sudo[62648]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:43:37 managed-node3 python3.12[62651]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 15 11:43:37 managed-node3 python3.12[62651]: ansible-systemd [WARNING] Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually
Feb 15 11:43:37 managed-node3 systemd[62093]: Reload requested from client PID 62652 ('systemctl')...
Feb 15 11:43:37 managed-node3 systemd[62093]: Reloading...
Feb 15 11:43:37 managed-node3 systemd[62093]: Reloading finished in 44 ms.
Feb 15 11:43:37 managed-node3 sudo[62648]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:43:37 managed-node3 sudo[62834]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-erczbbammjdvtydvdnqwfswgzyamrfnq ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637817.7428129-23825-182956840761292/AnsiballZ_systemd.py'
Feb 15 11:43:37 managed-node3 sudo[62834]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:43:38 managed-node3 python3.12[62837]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Feb 15 11:43:38 managed-node3 systemd[62093]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user....
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 25.
Feb 15 11:43:38 managed-node3 sh[62841]: active
Feb 15 11:43:38 managed-node3 systemd[62093]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user..
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 25.
Feb 15 11:43:38 managed-node3 systemd[62093]: Starting quadlet-pod-pod-pod.service...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 13.
Feb 15 11:43:38 managed-node3 systemd[62093]: Created slice session.slice - User Core Session Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 27.
Feb 15 11:43:38 managed-node3 systemd[62093]: Starting dbus-broker.service - D-Bus User Message Bus...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 26.
Feb 15 11:43:38 managed-node3 dbus-broker-launch[62915]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Feb 15 11:43:38 managed-node3 dbus-broker-launch[62915]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Feb 15 11:43:38 managed-node3 systemd[62093]: Started dbus-broker.service - D-Bus User Message Bus.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 26.
Feb 15 11:43:38 managed-node3 dbus-broker-launch[62915]: Ready
Feb 15 11:43:38 managed-node3 systemd[62093]: Created slice user.slice - Slice /user.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 33.
Feb 15 11:43:38 managed-node3 systemd[62093]: Created slice user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice - cgroup user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 32.
Feb 15 11:43:38 managed-node3 quadlet-pod-pod-pod[62849]: 5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9
Feb 15 11:43:38 managed-node3 systemd[62093]: Started podman-pause-81a92628.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 36.
Feb 15 11:43:38 managed-node3 systemd[62093]: Started libpod-e7cbdcd4a4d2be80d6b6f0f39970de5bc37d2ace3a5dabddf0189f66419fb679.scope - libcrun container.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 40.
Feb 15 11:43:39 managed-node3 quadlet-pod-pod-pod[62917]: quadlet-pod
Feb 15 11:43:39 managed-node3 systemd[62093]: Started quadlet-pod-pod-pod.service.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 13.
Feb 15 11:43:39 managed-node3 sudo[62834]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:43:39 managed-node3 python3.12[63073]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:40 managed-node3 python3.12[63206]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:40 managed-node3 python3.12[63338]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:42 managed-node3 python3.12[63470]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None
Feb 15 11:43:42 managed-node3 sudo[63643]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lnegpirqnwhkozslrwcttzrozybhjkil ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637822.4803877-23987-128223367373765/AnsiballZ_podman_image.py'
Feb 15 11:43:42 managed-node3 sudo[63643]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:43:42 managed-node3 systemd[62093]: Started podman-63647.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 45.
Feb 15 11:43:42 managed-node3 systemd[62093]: Started podman-63655.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 49.
Feb 15 11:43:43 managed-node3 systemd[62093]: Started podman-63679.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 53.
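The podman-63647/63655/63679 scopes correspond to the role's rootless image handling (AnsiballZ_podman_image.py run as user_quadlet_pod). The pattern used throughout this half of the test is to run every user-scope action under sudo with XDG_RUNTIME_DIR pointed at the user's runtime directory, so podman and systemctl talk to the per-user instances; roughly (a sketch of the become pattern visible in the sudo COMMAND= lines):

    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 \
        podman pull quay.io/libpod/testimage:20210610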
Feb 15 11:43:43 managed-node3 sudo[63643]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:43:44 managed-node3 python3.12[63816]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:43:44 managed-node3 python3.12[63947]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Feb 15 11:43:45 managed-node3 python3.12[64052]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1739637824.605515-24058-264025358587335/.source.container dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:43:45 managed-node3 sudo[64225]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wdzymwgecuhgakahugsocxpciwtubexc ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637825.39142-24090-215949958232410/AnsiballZ_systemd.py'
Feb 15 11:43:45 managed-node3 sudo[64225]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:43:45 managed-node3 python3.12[64228]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 15 11:43:45 managed-node3 systemd[62093]: Reload requested from client PID 64229 ('systemctl')...
Feb 15 11:43:45 managed-node3 systemd[62093]: Reloading...
Feb 15 11:43:45 managed-node3 systemd[62093]: Reloading finished in 53 ms.
Feb 15 11:43:45 managed-node3 sudo[64225]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:43:46 managed-node3 sudo[64411]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xjxrotbdfagbtxygdqihwvmxidgudxtt ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637826.0728385-24115-267137023247104/AnsiballZ_systemd.py'
Feb 15 11:43:46 managed-node3 sudo[64411]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:43:46 managed-node3 python3.12[64414]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Feb 15 11:43:46 managed-node3 systemd[62093]: Starting quadlet-pod-container.service...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 57.
Feb 15 11:43:46 managed-node3 systemd[62093]: Started quadlet-pod-container.service.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 57.
Feb 15 11:43:46 managed-node3 quadlet-pod-container[64417]: b8824a16aadc58dec79d949f9d3778ba802f5645c4b5b015379e1e43f176b9c2
Feb 15 11:43:46 managed-node3 sudo[64411]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:43:47 managed-node3 python3.12[64562]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:47 managed-node3 python3.12[64694]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:48 managed-node3 sudo[64868]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lbrabqhilyludoghfczuhuommiqkfqrj ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637827.831389-24173-70886584996704/AnsiballZ_command.py'
Feb 15 11:43:48 managed-node3 sudo[64868]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:43:48 managed-node3 python3.12[64871]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:48 managed-node3 systemd[62093]: Started podman-64872.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 71.
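The pod inspect above is the membership check for the rootless pod. Its output is not captured in this excerpt, but given the infra container naming seen in the root run and the container started at 11:43:46, the expected result is presumably along these lines (assumed, not shown in the log):

    # run as user_quadlet_pod with XDG_RUNTIME_DIR=/run/user/2223, as above
    podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}'
    # expected output, inferred from the container names elsewhere in this log:
    #   quadlet-pod-infra quadlet-pod-container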
Feb 15 11:43:48 managed-node3 sudo[64868]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:43:48 managed-node3 python3.12[65010]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:50 managed-node3 python3.12[65274]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:51 managed-node3 python3.12[65411]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:51 managed-node3 python3.12[65544]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:52 managed-node3 python3.12[65676]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:54 managed-node3 python3.12[65808]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:54 managed-node3 python3.12[65941]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:55 managed-node3 python3.12[66073]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:43:56 managed-node3 python3.12[66205]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:43:56 managed-node3 sudo[66380]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dniwxutstkpejdciuhgsgccgelqoseyd ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637836.3414338-24467-241863919385029/AnsiballZ_systemd.py'
Feb 15 11:43:56 managed-node3 sudo[66380]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:43:56 managed-node3 python3.12[66383]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Feb 15 11:43:56 managed-node3 systemd[62093]: Reload requested from client PID 66386 ('systemctl')...
Feb 15 11:43:56 managed-node3 systemd[62093]: Reloading...
Feb 15 11:43:56 managed-node3 systemd[62093]: Reloading finished in 51 ms.
Feb 15 11:43:56 managed-node3 systemd[62093]: Stopping quadlet-pod-container.service...
░░ Subject: A stop job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has begun execution.
░░
░░ The job identifier is 75.
Feb 15 11:44:06 managed-node3 quadlet-pod-container[66397]: time="2025-02-15T11:44:06-05:00" level=warning msg="StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL"
Feb 15 11:44:07 managed-node3 quadlet-pod-container[66397]: b8824a16aadc58dec79d949f9d3778ba802f5645c4b5b015379e1e43f176b9c2
Feb 15 11:44:07 managed-node3 systemd[62093]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a
░░ Subject: Unit process exited
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ An ExecStart= process belonging to unit UNIT has exited.
░░
░░ The process' exit code is 'exited' and its exit status is 137.
Feb 15 11:44:07 managed-node3 systemd[62093]: Removed slice user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice - cgroup user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 76 and the job result is done.
Feb 15 11:44:07 managed-node3 systemd[62093]: user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice: No such file or directory
Feb 15 11:44:07 managed-node3 systemd[62093]: user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice: No such file or directory
Feb 15 11:44:07 managed-node3 quadlet-pod-pod-pod[66426]: quadlet-pod
Feb 15 11:44:07 managed-node3 systemd[62093]: quadlet-pod-container.service: Failed with result 'exit-code'.
░░ Subject: Unit failed
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit UNIT has entered the 'failed' state with result 'exit-code'.
Feb 15 11:44:07 managed-node3 systemd[62093]: Stopped quadlet-pod-container.service.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 75 and the job result is done.
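The stop sequence above decodes as follows: podman sends the container its StopSignal (SIGTERM), the test image's main process ignores it, and after the default 10-second stop timeout podman escalates to SIGKILL. The status=137 systemd then reports is the conventional 128+signal encoding, so the 'failed' state here reflects a forced kill rather than a test error:

    # 137 = 128 + signal number; signal 9 is SIGKILL
    echo $((128 + 9))   # prints 137
    kill -l 9           # prints KILL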
Feb 15 11:44:07 managed-node3 sudo[66380]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:07 managed-node3 systemd[62093]: user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9.slice: No such file or directory
Feb 15 11:44:07 managed-node3 quadlet-pod-pod-pod[66444]: 5dcbb260c406d273ad01a65e2be7db94795fb733fd4ebe7678134de78cf9c7b9
Feb 15 11:44:07 managed-node3 python3.12[66585]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:44:08 managed-node3 python3.12[66850]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:44:09 managed-node3 sudo[67023]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iuqksiubpwjkvbucwdaapremgejpwzpw ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637848.9848597-24729-252857019734872/AnsiballZ_systemd.py'
Feb 15 11:44:09 managed-node3 sudo[67023]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:09 managed-node3 python3.12[67026]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 15 11:44:09 managed-node3 systemd[62093]: Reload requested from client PID 67027 ('systemctl')...
Feb 15 11:44:09 managed-node3 systemd[62093]: Reloading...
Feb 15 11:44:09 managed-node3 systemd[62093]: Reloading finished in 48 ms.
Feb 15 11:44:09 managed-node3 sudo[67023]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:09 managed-node3 sudo[67210]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nmzarvaoqqpdttvvlxuhyaeibtoqanug ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637849.6718123-24756-28474501376966/AnsiballZ_command.py'
Feb 15 11:44:09 managed-node3 sudo[67210]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:10 managed-node3 systemd[62093]: Started podman-67214.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 77.
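Teardown of the rootless container follows the quadlet logic in reverse: remove the unit file, daemon-reload so quadlet drops the generated service, then prune the image (as the next entries show). A shell sketch of those steps:

    rm /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user daemon-reload
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 podman image prune --all -f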
Feb 15 11:44:10 managed-node3 sudo[67210]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:10 managed-node3 sudo[67395]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jdawapprliyakavfxwnfwrvbqsmlejyl ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637850.2455535-24777-69664901840489/AnsiballZ_command.py'
Feb 15 11:44:10 managed-node3 sudo[67395]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:10 managed-node3 python3.12[67399]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:44:10 managed-node3 systemd[62093]: Started podman-67400.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 81.
Feb 15 11:44:10 managed-node3 sudo[67395]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:11 managed-node3 sudo[67580]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-opchwhmpgaqaysjhvwloumunylrddiyy ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637850.9955125-24809-56525204011821/AnsiballZ_command.py'
Feb 15 11:44:11 managed-node3 sudo[67580]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:11 managed-node3 python3.12[67583]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:44:11 managed-node3 systemd[62093]: Started podman-67584.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 85.
Feb 15 11:44:11 managed-node3 sudo[67580]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:11 managed-node3 sudo[67764]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vlkbgdvslulqbrbhrcyiypetworjdiax ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637851.4873707-24823-137339085554379/AnsiballZ_command.py'
Feb 15 11:44:11 managed-node3 sudo[67764]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:11 managed-node3 python3.12[67767]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:44:11 managed-node3 systemd[62093]: Started podman-67768.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 89.
Feb 15 11:44:11 managed-node3 sudo[67764]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:12 managed-node3 sudo[67947]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zfrnuzngzcurrtykelttquqxluyakwfw ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637851.9978194-24845-138419201424763/AnsiballZ_command.py'
Feb 15 11:44:12 managed-node3 sudo[67947]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:12 managed-node3 python3.12[67950]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:44:12 managed-node3 systemd[62093]: Started podman-67951.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 93.
Feb 15 11:44:12 managed-node3 sudo[67947]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:12 managed-node3 sudo[68131]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yhjaukyqlkvuxaxifjmxitydxadzmbvv ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637852.5269797-24862-106593142858014/AnsiballZ_command.py'
Feb 15 11:44:12 managed-node3 sudo[68131]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:12 managed-node3 python3.12[68134]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:44:12 managed-node3 systemd[62093]: Started podman-68135.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 97.
Feb 15 11:44:12 managed-node3 sudo[68131]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:13 managed-node3 sudo[68315]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ltgspfffpjaqpvsjcbbpysrsjhvspmuy ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637853.0314982-24886-130452991930393/AnsiballZ_command.py'
Feb 15 11:44:13 managed-node3 sudo[68315]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:13 managed-node3 systemd[62093]: Started podman-68319.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 101.
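The burst of podman-*.scope units here is the role's post-teardown audit, one scope per resource listing, each run as user_quadlet_pod. Roughly the following loop, whose output should show no leftover test resources (a sketch of the checks invoked above):

    for check in 'images -n' 'volume ls -n' 'ps --noheading' 'network ls -n -q'; do
        # $check is unquoted on purpose so the sub-command splits into words
        sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 podman $check
    done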
Feb 15 11:44:13 managed-node3 sudo[68315]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:13 managed-node3 sudo[68499]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-daeodeftojvjgxqnxbclgzzcrasremdt ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637853.5194123-24907-146352800460381/AnsiballZ_command.py'
Feb 15 11:44:13 managed-node3 sudo[68499]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:13 managed-node3 systemd[62093]: Started podman-68503.scope.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 105.
Feb 15 11:44:13 managed-node3 sudo[68499]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:14 managed-node3 sudo[68683]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-urydvzejnktdqckhrsvzdgaxgfvqfxfe ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637854.0641904-24929-133279432238856/AnsiballZ_service_facts.py'
Feb 15 11:44:14 managed-node3 sudo[68683]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:14 managed-node3 python3.12[68686]: ansible-service_facts Invoked
Feb 15 11:44:16 managed-node3 sudo[68683]: pam_unix(sudo:session): session closed for user user_quadlet_pod
Feb 15 11:44:16 managed-node3 python3.12[68927]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:44:17 managed-node3 python3.12[69060]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:44:17 managed-node3 python3.12[69192]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:44:19 managed-node3 python3.12[69324]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:44:19 managed-node3 sudo[69499]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-drxgofmpeddxekktocscblcxypmxfcjg ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637859.2139947-25063-216581050480821/AnsiballZ_systemd.py'
Feb 15 11:44:19 managed-node3 sudo[69499]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)
Feb 15 11:44:19 managed-node3 python3.12[69502]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Feb 15 11:44:19 managed-node3 systemd[62093]: Reload requested from client PID 69505 ('systemctl')...
Feb 15 11:44:19 managed-node3 systemd[62093]: Reloading...
Feb 15 11:44:19 managed-node3 systemd[62093]: Reloading finished in 46 ms.
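The ansible-systemd call at 11:44:19 retires the pod service the same way the container was retired: state=stopped with enabled=False and force=True is roughly the user-scope equivalent of (a sketch of the module's effect, not its literal implementation):

    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 \
        systemctl --user disable --now quadlet-pod-pod-pod.service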
Feb 15 11:44:19 managed-node3 sudo[69499]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:20 managed-node3 python3.12[69645]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:21 managed-node3 python3.12[69909]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:44:21 managed-node3 sudo[70082]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-umyawxdlaitxxvvamtnllbgyzicbcsag ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637861.2940896-25115-119340472731815/AnsiballZ_systemd.py' Feb 15 11:44:21 managed-node3 sudo[70082]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:21 managed-node3 python3.12[70085]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:44:21 managed-node3 systemd[62093]: Reload requested from client PID 70086 ('systemctl')... Feb 15 11:44:21 managed-node3 systemd[62093]: Reloading... Feb 15 11:44:21 managed-node3 systemd[62093]: Reloading finished in 47 ms. Feb 15 11:44:21 managed-node3 sudo[70082]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:22 managed-node3 sudo[70268]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xynuudsfqkxddxyexnxydmjecrajffic ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637861.9940612-25138-236986964499487/AnsiballZ_command.py' Feb 15 11:44:22 managed-node3 sudo[70268]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:22 managed-node3 systemd[62093]: Started podman-70272.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 109. Feb 15 11:44:22 managed-node3 sudo[70268]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:22 managed-node3 sudo[70451]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kghhmeupfkuyjzsromgkwjthaomdssii ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637862.6538496-25164-214646408717280/AnsiballZ_command.py' Feb 15 11:44:22 managed-node3 sudo[70451]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:22 managed-node3 python3.12[70454]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:23 managed-node3 systemd[62093]: Started podman-70455.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 113. Feb 15 11:44:23 managed-node3 sudo[70451]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:23 managed-node3 sudo[70635]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lpglrpcbkgstobxetxgrnnodjylevqtd ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637863.3496754-25193-123673130441281/AnsiballZ_command.py' Feb 15 11:44:23 managed-node3 sudo[70635]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:23 managed-node3 python3.12[70638]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:23 managed-node3 systemd[62093]: Started podman-70639.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 117. Feb 15 11:44:23 managed-node3 sudo[70635]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:24 managed-node3 sudo[70818]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rnfjxuutzokqmmueluamuwteymagxsqc ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637863.8471754-25212-113374440739781/AnsiballZ_command.py' Feb 15 11:44:24 managed-node3 sudo[70818]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:24 managed-node3 python3.12[70821]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:24 managed-node3 systemd[62093]: Started podman-70822.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 121. Feb 15 11:44:24 managed-node3 sudo[70818]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:24 managed-node3 sudo[71001]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ziyhonhdpicshwmewcncrizjblkzlkdj ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637864.3484652-25230-193941628033420/AnsiballZ_command.py' Feb 15 11:44:24 managed-node3 sudo[71001]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:24 managed-node3 python3.12[71004]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:24 managed-node3 systemd[62093]: Started podman-71005.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 125. Feb 15 11:44:24 managed-node3 sudo[71001]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:25 managed-node3 sudo[71186]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cncyjqewcgjzdonctnhfctqzisszzycn ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637864.8519754-25247-19622728191158/AnsiballZ_command.py' Feb 15 11:44:25 managed-node3 sudo[71186]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:25 managed-node3 python3.12[71189]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:25 managed-node3 systemd[62093]: Started podman-71190.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 129. Feb 15 11:44:25 managed-node3 sudo[71186]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:25 managed-node3 sudo[71369]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-padlnbrzqzsaijwgvuqmpwzbmnnkspxk ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637865.3727086-25269-10561284625868/AnsiballZ_command.py' Feb 15 11:44:25 managed-node3 sudo[71369]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:25 managed-node3 systemd[62093]: Started podman-71373.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 133. Feb 15 11:44:25 managed-node3 sudo[71369]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:26 managed-node3 sudo[71553]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vqbvwmflbmxesltgaxzfqvzrurwrxpkz ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637865.8823805-25290-61089256569782/AnsiballZ_command.py' Feb 15 11:44:26 managed-node3 sudo[71553]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:26 managed-node3 systemd[62093]: Started podman-71557.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 137. 
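The entries above show how every check runs as the rootless user: the role wraps each podman command in sudo with XDG_RUNTIME_DIR pointed at the user's runtime directory, so podman talks to the user-scope systemd instance rather than PID 1. A minimal equivalent task, assuming the same user_quadlet_pod account and UID 2223 seen in this log, might look like:

    - name: List rootless containers as the quadlet user (sketch)
      ansible.builtin.command: podman ps --noheading
      become: true
      become_user: user_quadlet_pod
      environment:
        # Runtime dir for UID 2223, taken from the log above.
        XDG_RUNTIME_DIR: /run/user/2223
      changed_when: false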
Feb 15 11:44:26 managed-node3 sudo[71553]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:26 managed-node3 sudo[71736]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nwzspvsimrxcineigiuisejcpfklyang ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637866.4759972-25311-157700782642828/AnsiballZ_service_facts.py' Feb 15 11:44:26 managed-node3 sudo[71736]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:26 managed-node3 python3.12[71739]: ansible-service_facts Invoked Feb 15 11:44:28 managed-node3 sudo[71736]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:28 managed-node3 python3.12[71981]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:29 managed-node3 sudo[72156]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oawcybubzcuswtwwmemciidzfrgumkne ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637869.0516264-25371-132533964179341/AnsiballZ_podman_container_info.py' Feb 15 11:44:29 managed-node3 sudo[72156]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:29 managed-node3 python3.12[72159]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Feb 15 11:44:29 managed-node3 systemd[62093]: Started podman-72160.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 141. Feb 15 11:44:29 managed-node3 sudo[72156]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:29 managed-node3 sudo[72341]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cmyegcgtzlgcgcglwkwsapvttjxnmsyh ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637869.627143-25381-242926722772935/AnsiballZ_command.py' Feb 15 11:44:29 managed-node3 sudo[72341]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:29 managed-node3 python3.12[72344]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:30 managed-node3 systemd[62093]: Started podman-72345.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 145. 
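Rather than parsing podman ps output, the role also calls the containers.podman.podman_container_info module (visible above as AnsiballZ_podman_container_info.py), which returns inspect-style data in structured form. A sketch of that call for the rootless user, reusing the runtime directory from this log:

    - name: Gather container info for the rootless user (sketch)
      containers.podman.podman_container_info:
        # Parameters mirror the invocation logged above (name omitted = all containers).
        executable: podman
      become: true
      become_user: user_quadlet_pod
      environment:
        XDG_RUNTIME_DIR: /run/user/2223
      register: __container_info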
Feb 15 11:44:30 managed-node3 sudo[72341]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:30 managed-node3 sudo[72526]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ahlhpnoipjzhxzaaylmijnwhianmurvj ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1739637870.1656148-25401-72806318965148/AnsiballZ_command.py' Feb 15 11:44:30 managed-node3 sudo[72526]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Feb 15 11:44:30 managed-node3 python3.12[72531]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:30 managed-node3 systemd[62093]: Started podman-72532.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 149. Feb 15 11:44:30 managed-node3 sudo[72526]: pam_unix(sudo:session): session closed for user user_quadlet_pod Feb 15 11:44:31 managed-node3 python3.12[72670]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl disable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Feb 15 11:44:31 managed-node3 systemd[1]: Stopping user@2223.service - User Manager for UID 2223... ░░ Subject: A stop job for unit user@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@2223.service has begun execution. ░░ ░░ The job identifier is 2741. Feb 15 11:44:31 managed-node3 systemd[62093]: Activating special unit exit.target... Feb 15 11:44:31 managed-node3 systemd[62093]: Stopping podman-pause-81a92628.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 165. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 166 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 162 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 171 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped target paths.target - Paths. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 168 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 169 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 172 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 163 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 174 and the job result is done. Feb 15 11:44:31 managed-node3 dbus-broker[62916]: Dispatched 1926 messages @ 4(±20)μs / message. ░░ Subject: Dispatched 1926 messages ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ This message is printed by dbus-broker when shutting down. It includes metric ░░ information collected during the runtime of dbus-broker. ░░ ░░ The message lists the number of dispatched messages ░░ (in this case 1926) as well as the mean time to ░░ handling a single message. The time measurements exclude the time spent on ░░ writing to and reading from the kernel. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopping dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 160. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 161 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped podman-pause-81a92628.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 165 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Removed slice user.slice - Slice /user. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 164 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Stopped dbus-broker.service - D-Bus User Message Bus. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 160 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Removed slice session.slice - User Core Session Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 173 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 159 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 157 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[62093]: app.slice: Consumed 1.007s CPU time, 56.5M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Feb 15 11:44:31 managed-node3 systemd[62093]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 156. Feb 15 11:44:31 managed-node3 systemd[62093]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 154. Feb 15 11:44:31 managed-node3 systemd[62093]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 153. Feb 15 11:44:31 managed-node3 systemd[1]: user@2223.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@2223.service has successfully entered the 'dead' state. Feb 15 11:44:31 managed-node3 systemd[1]: Stopped user@2223.service - User Manager for UID 2223. ░░ Subject: A stop job for unit user@2223.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@2223.service has finished. ░░ ░░ The job identifier is 2741 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[1]: user@2223.service: Consumed 2.126s CPU time, 71.1M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@2223.service completed and consumed the indicated resources. Feb 15 11:44:31 managed-node3 systemd[1]: Stopping user-runtime-dir@2223.service - User Runtime Directory /run/user/2223... 
░░ Subject: A stop job for unit user-runtime-dir@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@2223.service has begun execution. ░░ ░░ The job identifier is 2740. Feb 15 11:44:31 managed-node3 systemd[1]: run-user-2223.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-2223.mount has successfully entered the 'dead' state. Feb 15 11:44:31 managed-node3 systemd[1]: user-runtime-dir@2223.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@2223.service has successfully entered the 'dead' state. Feb 15 11:44:31 managed-node3 systemd[1]: Stopped user-runtime-dir@2223.service - User Runtime Directory /run/user/2223. ░░ Subject: A stop job for unit user-runtime-dir@2223.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@2223.service has finished. ░░ ░░ The job identifier is 2740 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[1]: Removed slice user-2223.slice - User Slice of UID 2223. ░░ Subject: A stop job for unit user-2223.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-2223.slice has finished. ░░ ░░ The job identifier is 2742 and the job result is done. Feb 15 11:44:31 managed-node3 systemd[1]: user-2223.slice: Consumed 2.154s CPU time, 71.2M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-2223.slice completed and consumed the indicated resources. Feb 15 11:44:31 managed-node3 systemd-logind[56322]: Removed session 10. ░░ Subject: Session 10 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 10 has been terminated. Feb 15 11:44:31 managed-node3 python3.12[72807]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:32 managed-node3 python3.12[72939]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Feb 15 11:44:32 managed-node3 systemd[1]: Stopping systemd-logind.service - User Login Management... ░░ Subject: A stop job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 2744. Feb 15 11:44:32 managed-node3 systemd[1]: systemd-logind.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-logind.service has successfully entered the 'dead' state. Feb 15 11:44:32 managed-node3 systemd[1]: Stopped systemd-logind.service - User Login Management. 
░░ Subject: A stop job for unit systemd-logind.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has finished. ░░ ░░ The job identifier is 2744 and the job result is done. Feb 15 11:44:32 managed-node3 python3.12[73084]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:32 managed-node3 systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm... ░░ Subject: A start job for unit modprobe@drm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has begun execution. ░░ ░░ The job identifier is 2825. Feb 15 11:44:32 managed-node3 systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Feb 15 11:44:32 managed-node3 systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 2825. Feb 15 11:44:32 managed-node3 systemd[1]: Starting systemd-logind.service - User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 2745. Feb 15 11:44:32 managed-node3 systemd-logind[73088]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Feb 15 11:44:32 managed-node3 systemd-logind[73088]: Watching system buttons on /dev/input/event0 (Power Button) Feb 15 11:44:32 managed-node3 systemd-logind[73088]: Watching system buttons on /dev/input/event1 (Sleep Button) Feb 15 11:44:32 managed-node3 systemd-logind[73088]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Feb 15 11:44:32 managed-node3 systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 2745. 
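The loginctl disable-linger call a few entries back uses the command module's removes= guard, so the task only runs while /var/lib/systemd/linger/user_quadlet_pod still exists; once linger is off, systemd tears down user@2223.service, the runtime directory mount, and the user slice, exactly as the surrounding entries show. The equivalent idempotent task:

    - name: Disable linger for the quadlet user (sketch)
      ansible.builtin.command:
        cmd: loginctl disable-linger user_quadlet_pod
        # Skip the task once the linger marker file is already gone.
        removes: /var/lib/systemd/linger/user_quadlet_pod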
Feb 15 11:44:33 managed-node3 python3.12[73225]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:35 managed-node3 python3.12[73487]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:36 managed-node3 python3.12[73624]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:36 managed-node3 python3.12[73757]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:37 managed-node3 python3.12[73889]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:39 managed-node3 python3.12[74021]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:40 managed-node3 python3.12[74154]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:40 managed-node3 python3.12[74286]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:41 managed-node3 python3.12[74418]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:41 managed-node3 python3.12[74549]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:42 managed-node3 python3.12[74680]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:44:42 managed-node3 systemd[1]: Stopping session-8.scope - Session 8 of User root... ░░ Subject: A stop job for unit session-8.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-8.scope has begun execution. ░░ ░░ The job identifier is 2908. 
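Cleanup of the per-user quadlet drops the unit file from ~/.config/containers/systemd and then reloads the user's systemd instance so the generated service vanishes. Under the same assumptions as above (user_quadlet_pod, UID 2223), the pair of tasks would be roughly:

    - name: Remove the user's quadlet container unit (sketch)
      ansible.builtin.file:
        path: /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container
        state: absent

    - name: Reload the user-scope systemd manager (sketch)
      ansible.builtin.systemd:
        daemon_reload: true
        scope: user
      become: true
      become_user: user_quadlet_pod
      environment:
        XDG_RUNTIME_DIR: /run/user/2223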
Feb 15 11:44:42 managed-node3 sshd-session[57262]: error: mm_reap: preauth child terminated by signal 15 Feb 15 11:44:42 managed-node3 sshd-session[57262]: pam_systemd(sshd:session): Failed to release session: No session '8' known Feb 15 11:44:42 managed-node3 sshd-session[57262]: pam_unix(sshd:session): session closed for user root Feb 15 11:44:42 managed-node3 systemd[1]: session-8.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-8.scope has successfully entered the 'dead' state. Feb 15 11:44:42 managed-node3 systemd[1]: Stopped session-8.scope - Session 8 of User root. ░░ Subject: A stop job for unit session-8.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-8.scope has finished. ░░ ░░ The job identifier is 2908 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[1]: session-8.scope: Consumed 43.992s CPU time, 67M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-8.scope completed and consumed the indicated resources. Feb 15 11:44:42 managed-node3 systemd[1]: Stopping user@0.service - User Manager for UID 0... ░░ Subject: A stop job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 2907. Feb 15 11:44:42 managed-node3 systemd[57270]: Activating special unit exit.target... Feb 15 11:44:42 managed-node3 systemd[57270]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 19 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 27 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 29 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 28 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 26 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 18 and the job result is done. 
Feb 15 11:44:42 managed-node3 systemd[57270]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 25 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 20 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 23 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[57270]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 16. Feb 15 11:44:42 managed-node3 systemd[57270]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Feb 15 11:44:42 managed-node3 systemd[57270]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Feb 15 11:44:42 managed-node3 systemd[1]: user@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@0.service has successfully entered the 'dead' state. Feb 15 11:44:42 managed-node3 systemd[1]: Stopped user@0.service - User Manager for UID 0. ░░ Subject: A stop job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has finished. ░░ ░░ The job identifier is 2907 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[1]: Stopping user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 2906. Feb 15 11:44:42 managed-node3 systemd[1]: run-user-0.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-0.mount has successfully entered the 'dead' state. Feb 15 11:44:42 managed-node3 systemd[1]: user-runtime-dir@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state. Feb 15 11:44:42 managed-node3 systemd[1]: Stopped user-runtime-dir@0.service - User Runtime Directory /run/user/0. 
░░ Subject: A stop job for unit user-runtime-dir@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has finished. ░░ ░░ The job identifier is 2906 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[1]: Removed slice user-0.slice - User Slice of UID 0. ░░ Subject: A stop job for unit user-0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-0.slice has finished. ░░ ░░ The job identifier is 2909 and the job result is done. Feb 15 11:44:42 managed-node3 systemd[1]: user-0.slice: Consumed 44.163s CPU time, 71.8M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-0.slice completed and consumed the indicated resources. Feb 15 11:44:44 managed-node3 sshd-session[74705]: Accepted publickey for root from 10.31.42.96 port 58914 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Feb 15 11:44:44 managed-node3 systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 2989. Feb 15 11:44:44 managed-node3 systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 2911. Feb 15 11:44:44 managed-node3 systemd-logind[73088]: New session 11 of user root. ░░ Subject: A new session 11 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 11 has been created for the user root. ░░ ░░ The leading process of the session is 74705. Feb 15 11:44:44 managed-node3 systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 2911. Feb 15 11:44:44 managed-node3 systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 2991. Feb 15 11:44:44 managed-node3 systemd-logind[73088]: New session 12 of user root. ░░ Subject: A new session 12 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 12 has been created for the user root. ░░ ░░ The leading process of the session is 74710. Feb 15 11:44:44 managed-node3 (systemd)[74710]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Feb 15 11:44:44 managed-node3 systemd[74710]: Queued start job for default target default.target. Feb 15 11:44:44 managed-node3 systemd[74710]: Created slice app.slice - User Application Slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Feb 15 11:44:44 managed-node3 systemd[74710]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Feb 15 11:44:44 managed-node3 systemd[74710]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Feb 15 11:44:44 managed-node3 systemd[74710]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Feb 15 11:44:44 managed-node3 systemd[74710]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Feb 15 11:44:44 managed-node3 systemd[74710]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Feb 15 11:44:44 managed-node3 systemd[74710]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 5. Feb 15 11:44:44 managed-node3 systemd[74710]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Feb 15 11:44:44 managed-node3 systemd[74710]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Feb 15 11:44:44 managed-node3 systemd[74710]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Feb 15 11:44:44 managed-node3 systemd[74710]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. 
░░ ░░ The job identifier is 2. Feb 15 11:44:44 managed-node3 systemd[74710]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Feb 15 11:44:44 managed-node3 systemd[74710]: Startup finished in 115ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 115395 microseconds. Feb 15 11:44:44 managed-node3 systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 2991. Feb 15 11:44:44 managed-node3 systemd[1]: Started session-11.scope - Session 11 of User root. ░░ Subject: A start job for unit session-11.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-11.scope has finished successfully. ░░ ░░ The job identifier is 3072. Feb 15 11:44:44 managed-node3 sshd-session[74705]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Feb 15 11:44:45 managed-node3 python3.12[74837]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:46 managed-node3 python3.12[74970]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:46 managed-node3 python3.12[75102]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:47 managed-node3 python3.12[75234]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:48 managed-node3 python3.12[75365]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:48 managed-node3 python3.12[75496]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:44:50 managed-node3 python3.12[75627]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:51 managed-node3 python3.12[75758]: ansible-user Invoked with 
name=user_quadlet_pod state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Feb 15 11:44:51 managed-node3 userdel[75760]: delete user 'user_quadlet_pod' Feb 15 11:44:51 managed-node3 userdel[75760]: removed group 'user_quadlet_pod' owned by 'user_quadlet_pod' Feb 15 11:44:51 managed-node3 userdel[75760]: removed shadow group 'user_quadlet_pod' owned by 'user_quadlet_pod' Feb 15 11:44:53 managed-node3 python3.12[76022]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:44:54 managed-node3 python3.12[76159]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Feb 15 11:44:54 managed-node3 python3.12[76291]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:57 managed-node3 python3.12[76424]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:44:58 managed-node3 python3.12[76557]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Feb 15 11:44:58 managed-node3 systemd[1]: Reload requested from client PID 76560 ('systemctl') (unit session-11.scope)... Feb 15 11:44:58 managed-node3 systemd[1]: Reloading... Feb 15 11:44:58 managed-node3 systemd-rc-local-generator[76604]: /etc/rc.d/rc.local is not marked executable, skipping. Feb 15 11:44:58 managed-node3 systemd[1]: Reloading finished in 220 ms. Feb 15 11:44:58 managed-node3 systemd[1]: Stopping quadlet-pod-container.service... ░░ Subject: A stop job for unit quadlet-pod-container.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-pod-container.service has begun execution. ░░ ░░ The job identifier is 3154. Feb 15 11:45:08 managed-node3 quadlet-pod-container[76616]: time="2025-02-15T11:45:08-05:00" level=warning msg="StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL" Feb 15 11:45:08 managed-node3 podman[76616]: 2025-02-15 11:45:08.734843464 -0500 EST m=+10.055097610 container died 415ed1f9fb5de83ac3a901092719b57637979183127816e58ec461b78409710d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Feb 15 11:45:08 managed-node3 systemd[1]: var-lib-containers-storage-overlay-f6bbc1336eba0fd5d15967e0f28f8470349e28a8684da59a27282eedf380dece-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-f6bbc1336eba0fd5d15967e0f28f8470349e28a8684da59a27282eedf380dece-merged.mount has successfully entered the 'dead' state. Feb 15 11:45:08 managed-node3 podman[76616]: 2025-02-15 11:45:08.791154998 -0500 EST m=+10.111408964 container remove 415ed1f9fb5de83ac3a901092719b57637979183127816e58ec461b78409710d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Feb 15 11:45:08 managed-node3 quadlet-pod-container[76616]: 415ed1f9fb5de83ac3a901092719b57637979183127816e58ec461b78409710d Feb 15 11:45:08 managed-node3 podman[76616]: 2025-02-15 11:45:08.792474112 -0500 EST m=+10.112728139 pod stop 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 (image=, name=quadlet-pod) Feb 15 11:45:08 managed-node3 systemd[1]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-container.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 137. Feb 15 11:45:08 managed-node3 systemd[1]: libpod-f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48.scope has successfully entered the 'dead' state. Feb 15 11:45:08 managed-node3 podman[76616]: 2025-02-15 11:45:08.81216126 -0500 EST m=+10.132415376 container died f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Feb 15 11:45:08 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Feb 15 11:45:08 managed-node3 kernel: veth0 (unregistering): left allmulticast mode Feb 15 11:45:08 managed-node3 kernel: veth0 (unregistering): left promiscuous mode Feb 15 11:45:08 managed-node3 kernel: podman0: port 1(veth0) entered disabled state Feb 15 11:45:08 managed-node3 NetworkManager[728]: [1739637908.8538] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Feb 15 11:45:08 managed-node3 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3155. Feb 15 11:45:08 managed-node3 systemd[1]: run-netns-netns\x2dab15558e\x2d4fc6\x2d0acf\x2dcfe1\x2de1b9008d1380.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dab15558e\x2d4fc6\x2d0acf\x2dcfe1\x2de1b9008d1380.mount has successfully entered the 'dead' state. 
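The status=137 above is expected rather than a test failure: the test image's main process ignores SIGTERM, so after podman's default 10-second grace period the container is killed with SIGKILL (128 + 9 = 137) and systemd records the stop as 'exit-code'. If a workload genuinely needed a longer grace period, the quadlet file could raise it. A hypothetical variant of the container unit from this test, assuming a quadlet version whose [Container] section supports the StopTimeout= key (see podman-systemd.unit(5)):

    - name: Deploy a container quadlet with a longer stop grace period (hypothetical)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-pod-container.container
        mode: "0644"
        content: |
          [Container]
          Image=quay.io/libpod/testimage:20210610
          ContainerName=quadlet-pod-container
          Pod=quadlet-pod-pod.pod
          # Assumption: StopTimeout= is available in the target podman's quadlet.
          StopTimeout=30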
Feb 15 11:45:08 managed-node3 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3155. Feb 15 11:45:08 managed-node3 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48-userdata-shm.mount has successfully entered the 'dead' state. Feb 15 11:45:08 managed-node3 systemd[1]: var-lib-containers-storage-overlay-eb0e645602601400e8cfb29b0332b5e883565087319edad6f53dbb9ebe13bbb8-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-eb0e645602601400e8cfb29b0332b5e883565087319edad6f53dbb9ebe13bbb8-merged.mount has successfully entered the 'dead' state. Feb 15 11:45:08 managed-node3 podman[76616]: 2025-02-15 11:45:08.912932677 -0500 EST m=+10.233186729 container cleanup f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Feb 15 11:45:08 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:08 managed-node3 systemd[1]: Removed slice machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice - cgroup machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice. ░░ Subject: A stop job for unit machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice has finished. ░░ ░░ The job identifier is 3234 and the job result is done. 
Feb 15 11:45:08 managed-node3 podman[76616]: 2025-02-15 11:45:08.924654632 -0500 EST m=+10.244908609 pod stop 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 (image=, name=quadlet-pod) Feb 15 11:45:08 managed-node3 systemd[1]: machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice: Failed to open /run/systemd/transient/machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice: No such file or directory Feb 15 11:45:08 managed-node3 podman[76666]: 2025-02-15 11:45:08.970343182 -0500 EST m=+0.032566279 pod stop 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 (image=, name=quadlet-pod) Feb 15 11:45:08 managed-node3 systemd[1]: machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice: Failed to open /run/systemd/transient/machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice: No such file or directory Feb 15 11:45:08 managed-node3 quadlet-pod-pod-pod[76666]: quadlet-pod Feb 15 11:45:08 managed-node3 systemd[1]: quadlet-pod-container.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-container.service has entered the 'failed' state with result 'exit-code'. Feb 15 11:45:09 managed-node3 systemd[1]: Stopped quadlet-pod-container.service. ░░ Subject: A stop job for unit quadlet-pod-container.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-pod-container.service has finished. ░░ ░░ The job identifier is 3154 and the job result is done. Feb 15 11:45:09 managed-node3 podman[76685]: 2025-02-15 11:45:09.091536838 -0500 EST m=+0.079618624 container remove f5bcd2630f8b281b3fea21e5bb80b22b5a2efcf79c25870ad2c98f92002eff48 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Feb 15 11:45:09 managed-node3 systemd[1]: machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice: Failed to open /run/systemd/transient/machine-libpod_pod_19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404.slice: No such file or directory Feb 15 11:45:09 managed-node3 podman[76685]: 2025-02-15 11:45:09.101356982 -0500 EST m=+0.089438764 pod remove 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 (image=, name=quadlet-pod) Feb 15 11:45:09 managed-node3 quadlet-pod-pod-pod[76685]: 19b0871d3b22d540cf1291d03caf7517425b7d1b8a58e5ebd05a65e6b2b03404 Feb 15 11:45:09 managed-node3 systemd[1]: quadlet-pod-pod-pod.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has successfully entered the 'dead' state. 
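Note the unit naming throughout this teardown: the file /etc/containers/systemd/quadlet-pod-pod.pod generates quadlet-pod-pod-pod.service (quadlet appends -pod to the file's basename), while the runtime pod is named quadlet-pod, presumably via PodName=. A minimal pod definition consistent with these entries, offered as a sketch of plausible contents rather than the test's actual file:

    - name: Deploy the pod quadlet (sketch of plausible contents)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-pod-pod.pod
        mode: "0644"
        content: |
          [Pod]
          # Sets the runtime pod name seen in the podman events above.
          PodName=quadlet-pod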
Feb 15 11:45:09 managed-node3 python3.12[76828]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Feb 15 11:45:10 managed-node3 python3.12[77092]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Feb 15 11:45:10 managed-node3 python3.12[77223]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Feb 15 11:45:10 managed-node3 systemd[1]: Reload requested from client PID 77224 ('systemctl') (unit session-11.scope)... Feb 15 11:45:10 managed-node3 systemd[1]: Reloading... Feb 15 11:45:11 managed-node3 systemd-rc-local-generator[77269]: /etc/rc.d/rc.local is not marked executable, skipping. Feb 15 11:45:11 managed-node3 systemd[1]: Reloading finished in 202 ms. Feb 15 11:45:11 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:12 managed-node3 python3.12[77547]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:12 managed-node3 podman[77548]: 2025-02-15 11:45:12.083383795 -0500 EST m=+0.027219024 image untag 29238fe0ea2d46765ca52e682e886bf4a885c74a7c84fe2d3c7f54495a5442b2 localhost/podman-pause:5.3.1-1733097600 Feb 15 11:45:12 managed-node3 podman[77548]: 2025-02-15 11:45:12.074171634 -0500 EST m=+0.018006874 image remove 29238fe0ea2d46765ca52e682e886bf4a885c74a7c84fe2d3c7f54495a5442b2 Feb 15 11:45:12 managed-node3 podman[77548]: 2025-02-15 11:45:12.099028047 -0500 EST m=+0.042863184 image untag 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Feb 15 11:45:12 managed-node3 podman[77548]: 2025-02-15 11:45:12.083392214 -0500 EST m=+0.027227310 image remove 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f Feb 15 11:45:12 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Feb 15 11:45:12 managed-node3 python3.12[77686]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Feb 15 11:45:12 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
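After removing each quadlet file, the role prunes images and then re-lists every resource class to confirm nothing is left behind; the same five read-only checks recur throughout this log, for both the rootless and the system scope. Expressed as one looped task:

    - name: Verify no podman resources remain (sketch)
      ansible.builtin.command: "{{ item }}"
      loop:
        - podman images -n
        - podman volume ls -n
        - podman ps --noheading
        - podman network ls -n -q
        - podman secret ls -n -q
      register: __leftovers
      changed_when: false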
Feb 15 11:45:13 managed-node3 python3.12[77825]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:13 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:13 managed-node3 python3.12[77964]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:13 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:13 managed-node3 python3.12[78102]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:13 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:14 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:14 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:15 managed-node3 python3.12[78516]: ansible-service_facts Invoked
Feb 15 11:45:17 managed-node3 python3.12[78757]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:45:18 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Feb 15 11:45:18 managed-node3 python3.12[78890]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Feb 15 11:45:18 managed-node3 systemd[1]: Reload requested from client PID 78894 ('systemctl') (unit session-11.scope)...
Feb 15 11:45:18 managed-node3 systemd[1]: Reloading...
Feb 15 11:45:19 managed-node3 systemd-rc-local-generator[78936]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 15 11:45:19 managed-node3 systemd[1]: Reloading finished in 206 ms.
Feb 15 11:45:19 managed-node3 python3.12[79077]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Feb 15 11:45:20 managed-node3 python3.12[79341]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Feb 15 11:45:21 managed-node3 python3.12[79472]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Feb 15 11:45:21 managed-node3 systemd[1]: Reload requested from client PID 79473 ('systemctl') (unit session-11.scope)...
Feb 15 11:45:21 managed-node3 systemd[1]: Reloading...
Feb 15 11:45:21 managed-node3 systemd-rc-local-generator[79514]: /etc/rc.d/rc.local is not marked executable, skipping.
Feb 15 11:45:21 managed-node3 systemd[1]: Reloading finished in 202 ms.
Feb 15 11:45:21 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:22 managed-node3 python3.12[79794]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:22 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:22 managed-node3 python3.12[79933]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:22 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:23 managed-node3 python3.12[80071]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:23 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:23 managed-node3 python3.12[80209]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:23 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:24 managed-node3 python3.12[80348]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Feb 15 11:45:24 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:24 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:25 managed-node3 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Feb 15 11:45:25 managed-node3 python3.12[80764]: ansible-service_facts Invoked
Feb 15 11:45:27 managed-node3 python3.12[81005]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node3              : ok=371  changed=28   unreachable=0    failed=1    skipped=391  rescued=1    ignored=1

TASKS RECAP ********************************************************************
Saturday 15 February 2025  11:45:27 -0500 (0:00:00.421)       0:02:25.817 *****
===============================================================================
fedora.linux_system_roles.podman : Stop and disable service ------------ 11.18s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Stop and disable service ------------ 11.00s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.21s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.17s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.13s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.00s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Check files ------------------------------------------------------------- 1.81s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40
fedora.linux_system_roles.podman : Ensure container images are present --- 1.78s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : See if getsubids exists -------------- 1.72s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
fedora.linux_system_roles.podman : Ensure container images are present --- 1.59s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Start service ------------------------ 1.46s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Gathering Facts --------------------------------------------------------- 1.38s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
fedora.linux_system_roles.podman : Start service ------------------------ 1.20s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.06s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.02s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Reload systemctl --------------------- 1.00s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.00s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.95s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.92s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.90s
/tmp/collections-WJe/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6