ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-IyT
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_pod.yml ************************************************
2 plays in /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:5
Monday 06 January 2025 13:44:50 -0500 (0:00:00.011) 0:00:00.011 ********
ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-Y0F/tests/vars/vault-variables.yml" ], "changed": false }

PLAY [Ensure that the role can manage quadlet pods] ****************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Monday 06 January 2025 13:44:50 -0500 (0:00:00.028) 0:00:00.040 ********
[WARNING]: Platform linux on host managed-node2 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
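(Note on the quadlet input: the specs that drive the quadlet tasks below are hidden by no_log in this run. Based on the __podman_quadlet_spec facts echoed later in this log, the "Run the role - root" invocation is roughly equivalent to the following sketch; the variable layout is an assumption, not the literal content of tests_quadlet_pod.yml.)

    # Hypothetical reconstruction from facts printed later in this log;
    # the real test playbook may differ in layout and extra options.
    - name: Run the role - root
      include_role:
        name: fedora.linux_system_roles.podman
      vars:
        podman_quadlet_specs:
          - name: quadlet-pod-pod
            type: pod
            Pod:
              PodName: quadlet-pod
          - name: quadlet-pod-container
            type: container
            Container:
              ContainerName: quadlet-pod-container
              Exec: /bin/busybox-extras httpd -f -p 80
              Image: quay.io/libpod/testimage:20210610
              Pod: quadlet-pod-pod.pod
            Install:
              WantedBy: default.target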
ok: [managed-node2] TASK [Run the role - root] ***************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:34 Monday 06 January 2025 13:44:52 -0500 (0:00:01.503) 0:00:01.543 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 06 January 2025 13:44:52 -0500 (0:00:00.055) 0:00:01.599 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 06 January 2025 13:44:52 -0500 (0:00:00.024) 0:00:01.624 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 06 January 2025 13:44:52 -0500 (0:00:00.037) 0:00:01.661 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 06 January 2025 13:44:52 -0500 (0:00:00.506) 0:00:02.167 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 06 January 2025 13:44:52 -0500 (0:00:00.048) 0:00:02.215 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 06 January 2025 13:44:53 -0500 (0:00:00.431) 0:00:02.647 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 06 January 2025 13:44:53 -0500 (0:00:00.056) 0:00:02.703 ******** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", 
"podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 06 January 2025 13:44:53 -0500 (0:00:00.075) 0:00:02.779 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 06 January 2025 13:44:54 -0500 (0:00:01.251) 0:00:04.031 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 06 January 2025 13:44:54 -0500 (0:00:00.090) 0:00:04.122 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 06 January 2025 13:44:55 -0500 (0:00:00.120) 0:00:04.243 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 06 January 2025 13:44:55 -0500 (0:00:00.118) 0:00:04.361 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 06 January 2025 13:44:55 -0500 (0:00:00.156) 0:00:04.518 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 06 January 2025 13:44:55 -0500 (0:00:00.085) 0:00:04.603 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025609", "end": "2025-01-06 13:44:55.922805", "rc": 0, "start": "2025-01-06 13:44:55.897196" } STDOUT: podman version 5.3.1 
TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 06 January 2025 13:44:56 -0500 (0:00:00.619) 0:00:05.223 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 06 January 2025 13:44:56 -0500 (0:00:00.063) 0:00:05.287 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 06 January 2025 13:44:56 -0500 (0:00:00.083) 0:00:05.371 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 06 January 2025 13:44:56 -0500 (0:00:00.158) 0:00:05.529 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 06 January 2025 13:44:56 -0500 (0:00:00.164) 0:00:05.694 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 06 January 2025 13:44:56 -0500 (0:00:00.122) 0:00:05.816 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 06 January 2025 13:44:56 -0500 (0:00:00.141) 0:00:05.958 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:44:56 -0500 (0:00:00.211) 0:00:06.169 ******** ok: 
[managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:44:57 -0500 (0:00:00.719) 0:00:06.889 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:44:57 -0500 (0:00:00.080) 0:00:06.970 ******** ok: [managed-node2] => {} MSG: item {} TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:44:57 -0500 (0:00:00.052) 0:00:07.022 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:44:57 -0500 (0:00:00.045) 0:00:07.067 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:44:58 -0500 (0:00:00.380) 0:00:07.448 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:44:58 -0500 (0:00:00.031) 0:00:07.479 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:44:58 -0500 (0:00:00.030) 
0:00:07.510 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:44:58 -0500 (0:00:00.031) 0:00:07.541 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:44:58 -0500 (0:00:00.030) 0:00:07.572 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:44:58 -0500 (0:00:00.031) 0:00:07.604 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:44:58 -0500 (0:00:00.029) 0:00:07.634 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:44:58 -0500 (0:00:00.031) 0:00:07.665 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 06 January 2025 13:44:58 -0500 (0:00:00.030) 0:00:07.695 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Monday 06 January 2025 13:44:58 -0500 (0:00:00.091) 0:00:07.787 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 06 January 2025 13:44:58 -0500 (0:00:00.091) 0:00:07.878 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 06 January 2025 13:44:58 -0500 (0:00:00.036) 0:00:07.914 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Monday 06 January 2025 13:44:58 -0500 (0:00:00.033) 0:00:07.947 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 06 January 2025 13:44:58 -0500 (0:00:00.093) 0:00:08.041 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 06 January 2025 13:44:58 -0500 (0:00:00.035) 0:00:08.076 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Monday 06 January 2025 13:44:58 -0500 (0:00:00.047) 0:00:08.123 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Monday 06 January 2025 13:44:58 -0500 (0:00:00.075) 0:00:08.199 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Monday 06 January 2025 13:44:59 -0500 (0:00:00.035) 0:00:08.235 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Monday 06 January 2025 13:44:59 
-0500 (0:00:00.041) 0:00:08.276 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Monday 06 January 2025 13:44:59 -0500 (0:00:00.094) 0:00:08.370 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Monday 06 January 2025 13:44:59 -0500 (0:00:00.051) 0:00:08.422 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Monday 06 January 2025 13:44:59 -0500 (0:00:00.031) 0:00:08.453 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Monday 06 January 2025 13:44:59 -0500 (0:00:00.032) 0:00:08.486 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Monday 06 January 2025 13:44:59 -0500 (0:00:00.039) 0:00:08.525 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Monday 06 January 2025 13:44:59 -0500 (0:00:00.045) 0:00:08.571 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Monday 06 January 2025 13:44:59 -0500 (0:00:00.047) 0:00:08.618 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Monday 06 January 2025 13:44:59 -0500 (0:00:00.052) 0:00:08.671 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Monday 06 January 2025 13:44:59 -0500 (0:00:00.042) 0:00:08.714 ********
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Monday 06 January 2025 13:44:59 -0500 (0:00:00.031) 0:00:08.745 ********
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Monday 06 January 2025 13:44:59 -0500 (0:00:00.029) 0:00:08.775 ********
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Monday 06 January 2025 13:44:59 -0500 (0:00:00.032) 0:00:08.808 ********
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 06 January 2025 13:44:59 -0500 (0:00:00.168) 0:00:08.976 ********
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 06 January 2025 13:44:59 -0500 (0:00:00.064) 0:00:09.041 ********
ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 06 January 2025 13:44:59 -0500 (0:00:00.104) 0:00:09.145 ********
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 06 January 2025 13:45:00
-0500 (0:00:00.094) 0:00:09.240 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:45:00 -0500 (0:00:00.114) 0:00:09.354 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:45:00 -0500 (0:00:00.116) 0:00:09.471 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:45:00 -0500 (0:00:00.062) 0:00:09.534 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:45:00 -0500 (0:00:00.057) 0:00:09.591 ******** ok: [managed-node2] => {} MSG: item { "Pod": { "PodName": "quadlet-pod" }, "name": "quadlet-pod-pod", "type": "pod" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:45:00 -0500 (0:00:00.062) 0:00:09.654 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:45:00 -0500 (0:00:00.071) 0:00:09.725 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** 
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:45:00 -0500 (0:00:00.440) 0:00:10.166 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:45:00 -0500 (0:00:00.052) 0:00:10.219 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:45:01 -0500 (0:00:00.050) 0:00:10.270 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:45:01 -0500 (0:00:00.052) 0:00:10.322 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:45:01 -0500 (0:00:00.053) 0:00:10.376 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:45:01 -0500 (0:00:00.052) 0:00:10.428 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:45:01 -0500 (0:00:00.051) 0:00:10.479 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:45:01 -0500 (0:00:00.050) 0:00:10.530 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 06 January 2025 13:45:01 -0500 (0:00:00.051) 0:00:10.581 ********
ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 06 January 2025 13:45:01 -0500 (0:00:00.127) 0:00:10.709 ********
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 06 January 2025 13:45:01 -0500 (0:00:00.056) 0:00:10.766 ********
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Monday 06 January 2025 13:45:01 -0500 (0:00:00.127) 0:00:10.893 ********
ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Monday 06 January 2025 13:45:01 -0500 (0:00:00.254) 0:00:11.148 ********
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Monday 06 January 2025 13:45:01 -0500 (0:00:00.072) 0:00:11.221 ********
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Monday 06 January 2025 13:45:02 -0500 (0:00:00.045) 0:00:11.266 ********
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 06 January 2025 13:45:02 -0500 (0:00:00.092) 0:00:11.359 ********
included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2

TASK [fedora.linux_system_roles.podman :
Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:45:02 -0500 (0:00:00.065) 0:00:11.424 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:45:02 -0500 (0:00:00.032) 0:00:11.457 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:45:02 -0500 (0:00:00.030) 0:00:11.487 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 06 January 2025 13:45:02 -0500 (0:00:00.028) 0:00:11.516 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 06 January 2025 13:45:02 -0500 (0:00:00.028) 0:00:11.544 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 06 January 2025 13:45:02 -0500 (0:00:00.028) 0:00:11.573 ******** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Monday 06 January 2025 13:45:02 -0500 (0:00:00.510) 0:00:12.084 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Monday 06 January 2025 13:45:02 -0500 (0:00:00.052) 0:00:12.136 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Monday 06 January 2025 13:45:02 -0500 (0:00:00.078) 0:00:12.215 ******** changed: [managed-node2] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/etc/containers/systemd/quadlet-pod-pod.pod", "gid": 0, "group": "root", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1736189103.0489454-17620-77377462417765/.source.pod", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Monday 06 January 2025 13:45:03 -0500 (0:00:00.904) 0:00:13.119 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Monday 06 January 2025 13:45:05 -0500 (0:00:01.155) 0:00:14.275 ******** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount systemd-journald.socket system.slice sysinit.target basic.target network-online.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", 
"ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", 
"LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3130605568", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", 
"RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Monday 06 January 2025 13:45:06 -0500 (0:00:00.972) 0:00:15.247 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:45:06 -0500 (0:00:00.071) 0:00:15.319 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:45:06 -0500 (0:00:00.073) 0:00:15.392 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:45:06 -0500 (0:00:00.227) 0:00:15.620 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:45:06 -0500 (0:00:00.065) 0:00:15.686 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:45:06 -0500 (0:00:00.084) 0:00:15.770 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:45:06 -0500 (0:00:00.097) 0:00:15.868 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:45:06 -0500 (0:00:00.057) 0:00:15.926 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:45:06 -0500 (0:00:00.058) 0:00:15.984 ******** ok: [managed-node2] => {} MSG: item { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" }, "name": "quadlet-pod-container", "type": "container" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:45:06 -0500 (0:00:00.057) 0:00:16.042 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:45:06 -0500 (0:00:00.072) 0:00:16.114 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", 
"checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:45:07 -0500 (0:00:00.406) 0:00:16.521 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:45:07 -0500 (0:00:00.048) 0:00:16.570 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:45:07 -0500 (0:00:00.043) 0:00:16.613 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:45:07 -0500 (0:00:00.033) 0:00:16.647 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:45:07 -0500 (0:00:00.031) 0:00:16.679 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:45:07 -0500 (0:00:00.032) 0:00:16.711 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:45:07 
-0500 (0:00:00.030) 0:00:16.742 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:45:07 -0500 (0:00:00.032) 0:00:16.775 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:45:07 -0500 (0:00:00.036) 0:00:16.812 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:45:07 -0500 (0:00:00.065) 0:00:16.877 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:45:07 -0500 (0:00:00.038) 0:00:16.915 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:45:07 -0500 (0:00:00.033) 0:00:16.948 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:45:07 -0500 (0:00:00.083) 0:00:17.032 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:45:07 -0500 (0:00:00.075) 0:00:17.108 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] 
*********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:45:07 -0500 (0:00:00.028) 0:00:17.137 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 06 January 2025 13:45:07 -0500 (0:00:00.065) 0:00:17.202 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:45:08 -0500 (0:00:00.051) 0:00:17.254 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:45:08 -0500 (0:00:00.029) 0:00:17.284 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:45:08 -0500 (0:00:00.030) 0:00:17.315 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 06 January 2025 13:45:08 -0500 (0:00:00.033) 0:00:17.348 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 06 January 2025 13:45:08 -0500 (0:00:00.038) 0:00:17.387 ******** ok: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 06 January 2025 13:45:10 -0500 (0:00:02.398) 0:00:19.785 ******** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 33, "state": "directory", "uid": 0 } TASK 
[fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Monday 06 January 2025 13:45:10 -0500 (0:00:00.435) 0:00:20.221 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Monday 06 January 2025 13:45:11 -0500 (0:00:00.050) 0:00:20.272 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Monday 06 January 2025 13:45:11 -0500 (0:00:00.041) 0:00:20.313 ******** changed: [managed-node2] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/etc/containers/systemd/quadlet-pod-container.container", "gid": 0, "group": "root", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1736189111.1390615-17972-280895023868873/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Monday 06 January 2025 13:45:11 -0500 (0:00:00.799) 0:00:21.113 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Monday 06 January 2025 13:45:12 -0500 (0:00:00.806) 0:00:21.920 ******** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice network-online.target systemd-journald.socket sysinit.target -.mount quadlet-pod-pod-pod.service basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin 
cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": 
"[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3111518208", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": 
"/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Monday 06 January 2025 13:45:13 -0500 (0:00:00.744) 0:00:22.665 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Monday 06 January 2025 13:45:13 -0500 (0:00:00.044) 0:00:22.709 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Monday 06 January 2025 13:45:13 -0500 (0:00:00.034) 0:00:22.744 
******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Monday 06 January 2025 13:45:13 -0500 (0:00:00.033) 0:00:22.778 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40 Monday 06 January 2025 13:45:13 -0500 (0:00:00.128) 0:00:22.906 ******** ok: [managed-node2] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:01.004860", "end": "2025-01-06 13:45:15.029490", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-01-06 13:45:14.024630" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node2] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003053", "end": "2025-01-06 13:45:15.376435", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-01-06 13:45:15.373382" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:49 Monday 06 January 2025 13:45:15 -0500 (0:00:01.781) 0:00:24.688 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.038828", "end": "2025-01-06 13:45:15.839734", "failed_when_result": false, "rc": 0, "start": "2025-01-06 13:45:15.800906" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Create user for testing] ************************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:57 Monday 06 January 2025 13:45:15 -0500 (0:00:00.491) 0:00:25.179 ******** changed: [managed-node2] => { "changed": true, "comment": "", "create_home": true, "group": 2223, "home": "/home/user_quadlet_pod", "name": "user_quadlet_pod", "shell": "/bin/bash", "state": "present", "system": false, "uid": 2223 } TASK [Run the role - user] ***************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:62 Monday 06 January 2025 13:45:16 -0500 (0:00:00.744) 0:00:25.924 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 06 January 2025 13:45:16 -0500 (0:00:00.150) 0:00:26.074 ******** included: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 06 January 2025 13:45:16 -0500 (0:00:00.103) 0:00:26.178 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 06 January 2025 13:45:17 -0500 (0:00:00.070) 0:00:26.248 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 06 January 2025 13:45:17 -0500 (0:00:00.085) 0:00:26.334 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 06 January 2025 13:45:17 -0500 (0:00:00.068) 0:00:26.402 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 06 January 2025 13:45:17 -0500 (0:00:00.048) 0:00:26.450 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 06 January 2025 13:45:17 -0500 (0:00:00.046) 0:00:26.497 ******** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ 
"iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 06 January 2025 13:45:17 -0500 (0:00:00.117) 0:00:26.614 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 06 January 2025 13:45:18 -0500 (0:00:00.809) 0:00:27.424 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 06 January 2025 13:45:18 -0500 (0:00:00.030) 0:00:27.454 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 06 January 2025 13:45:18 -0500 (0:00:00.039) 0:00:27.493 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 06 January 2025 13:45:18 -0500 (0:00:00.035) 0:00:27.529 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 06 January 2025 13:45:18 -0500 (0:00:00.090) 0:00:27.619 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 06 January 2025 13:45:18 -0500 (0:00:00.036) 0:00:27.656 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024854", "end": "2025-01-06 13:45:18.754990", "rc": 0, "start": "2025-01-06 13:45:18.730136" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 06 January 2025 13:45:18 -0500 (0:00:00.390) 0:00:28.046 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 06 January 2025 13:45:18 -0500 (0:00:00.031) 0:00:28.078 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 06 January 2025 13:45:18 -0500 (0:00:00.046) 0:00:28.125 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 06 January 2025 13:45:18 -0500 (0:00:00.046) 0:00:28.172 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 06 January 2025 13:45:18 -0500 (0:00:00.039) 0:00:28.211 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 06 January 2025 13:45:19 -0500 (0:00:00.054) 0:00:28.265 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 06 January 2025 13:45:19 -0500 (0:00:00.054) 0:00:28.320 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:45:19 -0500 (0:00:00.062) 0:00:28.382 ******** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "user_quadlet_pod": [ "x", "2223", "2223", "", "/home/user_quadlet_pod", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:45:19 -0500 (0:00:00.395) 0:00:28.778 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:45:19 -0500 (0:00:00.057) 0:00:28.835 ******** ok: [managed-node2] => {} MSG: item {} TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:45:19 -0500 (0:00:00.050) 0:00:28.886 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:45:19 -0500 (0:00:00.084) 0:00:28.971 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:45:20 -0500 (0:00:00.507) 0:00:29.478 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003865", "end": "2025-01-06 13:45:20.659906", "rc": 0, "start": "2025-01-06 13:45:20.656041" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:45:20 -0500 (0:00:00.499) 0:00:29.978 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005411", "end": "2025-01-06 13:45:21.130904", "rc": 0, "start": "2025-01-06 13:45:21.125493" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:45:21 -0500 (0:00:00.449) 0:00:30.428 ******** ok: [managed-node2] => { 
"ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:45:21 -0500 (0:00:00.082) 0:00:30.510 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:45:21 -0500 (0:00:00.130) 0:00:30.641 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:45:21 -0500 (0:00:00.052) 0:00:30.694 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:45:21 -0500 (0:00:00.053) 0:00:30.747 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:45:21 -0500 (0:00:00.051) 0:00:30.799 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 06 January 2025 13:45:21 -0500 (0:00:00.065) 0:00:30.865 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/root/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/root/.config/containers/policy.json", "__podman_registries_conf_file": "/root/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/root/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Monday 06 January 2025 13:45:21 -0500 (0:00:00.073) 0:00:30.938 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 06 January 2025 13:45:21 -0500 (0:00:00.093) 0:00:31.032 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 06 January 2025 13:45:21 -0500 (0:00:00.049) 0:00:31.082 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Monday 06 January 2025 13:45:21 -0500 (0:00:00.051) 0:00:31.134 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 06 January 2025 13:45:22 -0500 (0:00:00.115) 0:00:31.249 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 06 January 2025 13:45:22 -0500 (0:00:00.053) 0:00:31.303 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Monday 06 January 2025 13:45:22 -0500 (0:00:00.059) 0:00:31.362 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Monday 06 January 2025 13:45:22 -0500 (0:00:00.114) 0:00:31.476 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Monday 06 January 2025 13:45:22 -0500 (0:00:00.052) 0:00:31.529 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Monday 06 January 2025 13:45:22 
-0500 (0:00:00.049) 0:00:31.579 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Monday 06 January 2025 13:45:22 -0500 (0:00:00.113) 0:00:31.693 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Monday 06 January 2025 13:45:22 -0500 (0:00:00.050) 0:00:31.743 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Monday 06 January 2025 13:45:22 -0500 (0:00:00.059) 0:00:31.802 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Monday 06 January 2025 13:45:22 -0500 (0:00:00.145) 0:00:31.948 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Monday 06 January 2025 13:45:22 -0500 (0:00:00.057) 0:00:32.005 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Monday 06 January 2025 13:45:22 -0500 (0:00:00.060) 0:00:32.065 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Monday 06 January 2025 13:45:22 -0500 (0:00:00.066) 0:00:32.132 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Monday 06 January 2025 13:45:22 -0500 (0:00:00.039) 0:00:32.171 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Monday 06 January 2025 13:45:22 -0500 (0:00:00.036) 0:00:32.208 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Monday 06 January 2025 13:45:23 -0500 (0:00:00.036) 0:00:32.244 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Monday 06 January 2025 13:45:23 -0500 (0:00:00.032) 0:00:32.277 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Monday 06 January 2025 13:45:23 -0500 (0:00:00.028) 0:00:32.305 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:45:23 -0500 (0:00:00.084) 0:00:32.389 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:45:23 -0500 (0:00:00.041) 0:00:32.430 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:45:23 -0500 (0:00:00.048) 0:00:32.479 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 
13:45:23 -0500 (0:00:00.044) 0:00:32.523 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:45:23 -0500 (0:00:00.077) 0:00:32.600 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:45:23 -0500 (0:00:00.187) 0:00:32.788 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:45:23 -0500 (0:00:00.062) 0:00:32.850 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:45:23 -0500 (0:00:00.068) 0:00:32.919 ******** ok: [managed-node2] => {} MSG: item { "Pod": { "PodName": "quadlet-pod" }, "name": "quadlet-pod-pod", "type": "pod" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:45:23 -0500 (0:00:00.057) 0:00:32.976 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:45:23 -0500 (0:00:00.067) 0:00:33.043 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user 
subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:45:24 -0500 (0:00:00.534) 0:00:33.578 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004084", "end": "2025-01-06 13:45:24.701061", "rc": 0, "start": "2025-01-06 13:45:24.696977" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:45:24 -0500 (0:00:00.445) 0:00:34.023 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.004808", "end": "2025-01-06 13:45:25.156194", "rc": 0, "start": "2025-01-06 13:45:25.151386" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:45:25 -0500 (0:00:00.426) 0:00:34.449 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:45:25 -0500 (0:00:00.050) 0:00:34.500 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:45:25 -0500 (0:00:00.033) 0:00:34.534 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:45:25 -0500 (0:00:00.031) 0:00:34.566 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:45:25 -0500 (0:00:00.033) 0:00:34.599 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:45:25 -0500 (0:00:00.031) 0:00:34.630 ******** skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:45:25 -0500 (0:00:00.032) 0:00:34.663 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:45:25 -0500 (0:00:00.071) 0:00:34.735 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:45:25 -0500 (0:00:00.065) 0:00:34.800 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:45:25 -0500 (0:00:00.062) 0:00:34.863 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:45:25 -0500 (0:00:00.168) 0:00:35.032 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:45:25 -0500 (0:00:00.057) 0:00:35.090 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:45:25 -0500 (0:00:00.039) 0:00:35.130 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 06 January 2025 13:45:25 -0500 (0:00:00.082) 0:00:35.212 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:45:26 -0500 (0:00:00.051) 0:00:35.264 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": "0:00:00.014009", "end": "2025-01-06 13:45:26.365015", "rc": 0, "start": "2025-01-06 13:45:26.351006" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:45:26 -0500 (0:00:00.446) 0:00:35.711 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:45:26 -0500 (0:00:00.056) 0:00:35.767 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 06 January 2025 13:45:26 -0500 (0:00:00.036) 0:00:35.804 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 06 January 2025 13:45:26 -0500 (0:00:00.075) 0:00:35.879 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 06 January 2025 13:45:26 -0500 (0:00:00.033) 0:00:35.913 ******** changed: [managed-node2] => { "changed": true, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 6, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Monday 06 January 2025 13:45:27 -0500 (0:00:00.401) 0:00:36.315 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file 
content is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Monday 06 January 2025 13:45:27 -0500 (0:00:00.058) 0:00:36.373 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Monday 06 January 2025 13:45:27 -0500 (0:00:00.052) 0:00:36.426 ******** changed: [managed-node2] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1736189127.2591572-18736-229058402969634/.source.pod", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Monday 06 January 2025 13:45:27 -0500 (0:00:00.752) 0:00:37.179 ******** [WARNING]: Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Monday 06 January 2025 13:45:28 -0500 (0:00:00.684) 0:00:37.863 ******** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "podman-user-wait-network-online.service app.slice basic.target -.mount run-user-2223.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog 
cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": 
"running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3691020288", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", 
"RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Monday 06 January 2025 13:45:30 -0500 (0:00:01.365) 0:00:39.228 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:45:30 -0500 (0:00:00.032) 0:00:39.261 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", 
"__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:45:30 -0500 (0:00:00.041) 0:00:39.303 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:45:30 -0500 (0:00:00.038) 0:00:39.342 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:45:30 -0500 (0:00:00.030) 0:00:39.372 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:45:30 -0500 (0:00:00.053) 0:00:39.426 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:45:30 -0500 (0:00:00.086) 0:00:39.513 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:45:30 -0500 (0:00:00.039) 0:00:39.552 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:45:30 -0500 (0:00:00.038) 0:00:39.591 ******** ok: [managed-node2] => {} MSG: item { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": 
"quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" }, "name": "quadlet-pod-container", "type": "container" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:45:30 -0500 (0:00:00.040) 0:00:39.631 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:45:30 -0500 (0:00:00.044) 0:00:39.676 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:45:30 -0500 (0:00:00.383) 0:00:40.059 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004198", "end": "2025-01-06 13:45:31.158419", "rc": 0, "start": "2025-01-06 13:45:31.154221" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:45:31 -0500 (0:00:00.405) 0:00:40.465 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005900", "end": "2025-01-06 13:45:31.601423", "rc": 0, "start": "2025-01-06 13:45:31.595523" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:45:31 -0500 (0:00:00.452) 0:00:40.918 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:45:31 -0500 (0:00:00.140) 0:00:41.058 ******** skipping: [managed-node2] => { 
"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:45:31 -0500 (0:00:00.053) 0:00:41.112 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:45:31 -0500 (0:00:00.074) 0:00:41.187 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:45:32 -0500 (0:00:00.085) 0:00:41.273 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:45:32 -0500 (0:00:00.070) 0:00:41.344 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:45:32 -0500 (0:00:00.065) 0:00:41.410 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:45:32 -0500 (0:00:00.093) 0:00:41.503 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:45:32 -0500 (0:00:00.055) 0:00:41.559 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:45:32 -0500 (0:00:00.059) 0:00:41.618 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:45:32 -0500 (0:00:00.175) 0:00:41.793 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:45:32 -0500 (0:00:00.118) 0:00:41.912 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:45:32 -0500 (0:00:00.050) 0:00:41.963 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 06 January 2025 13:45:32 -0500 (0:00:00.146) 0:00:42.110 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:45:33 -0500 (0:00:00.115) 0:00:42.226 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_pod exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_pod' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:45:33 -0500 (0:00:00.514) 0:00:42.740 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:45:33 -0500 (0:00:00.090) 0:00:42.831 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 06 January 2025 13:45:33 -0500 (0:00:00.059) 0:00:42.890 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 06 January 2025 13:45:33 -0500 (0:00:00.063) 0:00:42.954 ******** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 06 January 2025 13:45:35 -0500 (0:00:01.747) 0:00:44.701 ******** ok: [managed-node2] => { "changed": false, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 33, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Monday 06 January 2025 13:45:35 -0500 (0:00:00.452) 0:00:45.154 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Monday 06 January 2025 13:45:35 -0500 (0:00:00.035) 0:00:45.189 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Monday 06 January 2025 13:45:36 -0500 (0:00:00.039) 0:00:45.229 ******** changed: [managed-node2] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1736189136.065039-19111-268270576847091/.source.container", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Monday 06 January 2025 13:45:36 -0500 (0:00:00.757) 
0:00:45.986 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Monday 06 January 2025 13:45:37 -0500 (0:00:00.671) 0:00:46.658 ******** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "podman-user-wait-network-online.service app.slice run-user-2223.mount quadlet-pod-pod-pod.service basic.target -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "default.target shutdown.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split 
--sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", 
"MemoryAvailable": "3660976128", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": 
"no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service default.target", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Monday 06 January 2025 13:45:38 -0500 (0:00:00.748) 0:00:47.406 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Monday 06 January 2025 13:45:38 -0500 (0:00:00.033) 0:00:47.439 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Monday 06 January 2025 13:45:38 -0500 (0:00:00.029) 0:00:47.469 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Monday 06 January 2025 13:45:38 -0500 (0:00:00.043) 0:00:47.512 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:70 Monday 06 January 2025 13:45:38 -0500 (0:00:00.112) 0:00:47.625 ******** ok: [managed-node2] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:00.003401", "end": "2025-01-06 13:45:38.744179", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-01-06 13:45:38.740778" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node2] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.003628", "end": 
"2025-01-06 13:45:39.103237", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-01-06 13:45:39.099609" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:79 Monday 06 January 2025 13:45:39 -0500 (0:00:00.764) 0:00:48.389 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.057268", "end": "2025-01-06 13:45:39.609431", "failed_when_result": false, "rc": 0, "start": "2025-01-06 13:45:39.552163" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Ensure linger] *********************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:91 Monday 06 January 2025 13:45:39 -0500 (0:00:00.577) 0:00:48.967 ******** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1736189126.355973, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1736189126.355973, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4467328, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1736189126.355973, "nlink": 1, "path": "/var/lib/systemd/linger/user_quadlet_pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "3099152076", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:99 Monday 06 January 2025 13:45:40 -0500 (0:00:00.468) 0:00:49.436 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 06 January 2025 13:45:40 -0500 (0:00:00.196) 0:00:49.632 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 06 January 2025 13:45:40 -0500 (0:00:00.134) 0:00:49.767 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 06 January 2025 13:45:40 -0500 (0:00:00.075) 0:00:49.842 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 06 January 2025 13:45:40 -0500 (0:00:00.051) 0:00:49.893 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 06 January 2025 13:45:40 -0500 (0:00:00.130) 0:00:50.024 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 06 January 2025 13:45:40 -0500 (0:00:00.053) 0:00:50.077 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 06 January 2025 13:45:40 -0500 (0:00:00.054) 0:00:50.131 ******** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 06 January 2025 13:45:41 -0500 (0:00:00.111) 0:00:50.243 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 06 January 2025 13:45:41 -0500 (0:00:00.839) 0:00:51.083 ******** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 06 January 2025 13:45:41 -0500 (0:00:00.044) 0:00:51.127 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 06 January 2025 13:45:41 -0500 (0:00:00.057) 0:00:51.184 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 06 January 2025 13:45:42 -0500 (0:00:00.055) 0:00:51.240 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 06 January 2025 13:45:42 -0500 (0:00:00.036) 0:00:51.276 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 06 January 2025 13:45:42 -0500 (0:00:00.038) 0:00:51.315 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026361", "end": "2025-01-06 13:45:42.422795", "rc": 0, "start": "2025-01-06 13:45:42.396434" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 06 January 2025 13:45:42 -0500 (0:00:00.408) 0:00:51.724 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 06 January 2025 13:45:42 -0500 (0:00:00.064) 0:00:51.788 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 06 January 2025 13:45:42 -0500 (0:00:00.032) 0:00:51.821 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package 
version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 06 January 2025 13:45:42 -0500 (0:00:00.038) 0:00:51.860 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 06 January 2025 13:45:42 -0500 (0:00:00.037) 0:00:51.898 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 06 January 2025 13:45:42 -0500 (0:00:00.053) 0:00:51.951 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 06 January 2025 13:45:42 -0500 (0:00:00.054) 0:00:52.005 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:45:42 -0500 (0:00:00.176) 0:00:52.181 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:45:43 -0500 (0:00:00.048) 0:00:52.230 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:45:43 -0500 (0:00:00.051) 0:00:52.281 ******** ok: [managed-node2] => {} MSG: item {} TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:45:43 -0500 (0:00:00.050) 0:00:52.332 ******** ok: [managed-node2] => { "ansible_facts": { 
"__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:45:43 -0500 (0:00:00.050) 0:00:52.382 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:45:43 -0500 (0:00:00.382) 0:00:52.765 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004375", "end": "2025-01-06 13:45:43.874117", "rc": 0, "start": "2025-01-06 13:45:43.869742" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:45:43 -0500 (0:00:00.407) 0:00:53.173 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005666", "end": "2025-01-06 13:45:44.269238", "rc": 0, "start": "2025-01-06 13:45:44.263572" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:45:44 -0500 (0:00:00.384) 0:00:53.557 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:45:44 -0500 (0:00:00.049) 0:00:53.607 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:45:44 -0500 (0:00:00.033) 0:00:53.641 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:45:44 -0500 (0:00:00.032) 0:00:53.673 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:45:44 -0500 (0:00:00.033) 0:00:53.707 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:45:44 -0500 (0:00:00.031) 0:00:53.739 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 06 January 2025 13:45:44 -0500 (0:00:00.032) 0:00:53.772 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Monday 06 January 2025 13:45:44 -0500 (0:00:00.040) 0:00:53.812 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 06 January 2025 13:45:44 -0500 (0:00:00.076) 0:00:53.889 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 06 January 2025 13:45:44 -0500 (0:00:00.032) 0:00:53.921 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: 
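The getsubids output shown above ("0: user_quadlet_pod 589824 65536") gives the range start (589824) and length (65536), which the role parses into podman_subuid_info and podman_subgid_info. A minimal standalone pre-check of the same requirement, a sketch outside the role and not part of it (user name taken from the log):

    - name: Check subordinate IDs for the rootless user (sketch)
      ansible.builtin.command: getsubids user_quadlet_pod
      register: subuid_check
      changed_when: false
      failed_when: subuid_check.rc != 0 or subuid_check.stdout | trim | length == 0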
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Monday 06 January 2025 13:45:44 -0500 (0:00:00.031) 0:00:53.952 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 06 January 2025 13:45:44 -0500 (0:00:00.084) 0:00:54.037 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 06 January 2025 13:45:44 -0500 (0:00:00.106) 0:00:54.143 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Monday 06 January 2025 13:45:44 -0500 (0:00:00.053) 0:00:54.197 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Monday 06 January 2025 13:45:45 -0500 (0:00:00.095) 0:00:54.292 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Monday 06 January 2025 13:45:45 -0500 (0:00:00.054) 0:00:54.346 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Monday 06 January 2025 13:45:45 -0500 (0:00:00.052) 0:00:54.399 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Monday 06 January 2025 13:45:45 -0500 (0:00:00.101) 0:00:54.501 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Monday 06 January 2025 13:45:45 -0500 (0:00:00.047) 0:00:54.549 
******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Monday 06 January 2025 13:45:45 -0500 (0:00:00.056) 0:00:54.605 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Monday 06 January 2025 13:45:45 -0500 (0:00:00.049) 0:00:54.654 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Monday 06 January 2025 13:45:45 -0500 (0:00:00.052) 0:00:54.706 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Monday 06 January 2025 13:45:45 -0500 (0:00:00.052) 0:00:54.758 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Monday 06 January 2025 13:45:45 -0500 (0:00:00.054) 0:00:54.813 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Monday 06 January 2025 13:45:45 -0500 (0:00:00.051) 0:00:54.864 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Monday 06 January 2025 13:45:45 -0500 (0:00:00.068) 0:00:54.933 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Monday 06 January 2025 13:45:45 -0500 (0:00:00.041) 0:00:54.975 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes 
specifications] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Monday 06 January 2025 13:45:45 -0500 (0:00:00.033) 0:00:55.008 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Monday 06 January 2025 13:45:45 -0500 (0:00:00.033) 0:00:55.042 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:45:45 -0500 (0:00:00.098) 0:00:55.141 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:45:46 -0500 (0:00:00.086) 0:00:55.227 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:45:46 -0500 (0:00:00.040) 0:00:55.267 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:45:46 -0500 (0:00:00.036) 0:00:55.304 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:45:46 -0500 (0:00:00.065) 0:00:55.370 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] 
***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:45:46 -0500 (0:00:00.100) 0:00:55.471 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:45:46 -0500 (0:00:00.057) 0:00:55.528 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:45:46 -0500 (0:00:00.057) 0:00:55.585 ******** ok: [managed-node2] => {} MSG: item { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" }, "name": "quadlet-pod-container", "state": "absent", "type": "container" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:45:46 -0500 (0:00:00.057) 0:00:55.642 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:45:46 -0500 (0:00:00.067) 0:00:55.710 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:45:46 -0500 (0:00:00.426) 0:00:56.137 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003944", "end": "2025-01-06 13:45:47.257941", "rc": 0, "start": "2025-01-06 13:45:47.253997" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK 
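The Debug message above shows the same container spec being processed again, this time with "state": "absent": this is the cleanup pass started by the "Cleanup user" play. A minimal sketch of how that teardown is requested, assuming the same podman_quadlet_specs input layout as before and keyed off the values in the debug output:

    podman_quadlet_specs:
      # Container is removed first, then the pod it belongs to
      - name: quadlet-pod-container
        type: container
        state: absent
      - name: quadlet-pod-pod
        type: pod
        state: absent

With state: absent the role stops the generated user service, removes the unit file, reloads the user systemd instance, and prunes unused images, as the following tasks show.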
[fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:45:47 -0500 (0:00:00.408) 0:00:56.545 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.004912", "end": "2025-01-06 13:45:47.635012", "rc": 0, "start": "2025-01-06 13:45:47.630100" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:45:47 -0500 (0:00:00.378) 0:00:56.923 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:45:47 -0500 (0:00:00.056) 0:00:56.980 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:45:47 -0500 (0:00:00.041) 0:00:57.022 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:45:47 -0500 (0:00:00.036) 0:00:57.059 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:45:47 -0500 (0:00:00.045) 0:00:57.104 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:45:47 -0500 (0:00:00.051) 0:00:57.155 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:45:47 -0500 (0:00:00.037) 0:00:57.193 ******** ok: [managed-node2] => { "ansible_facts": { 
"__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:45:48 -0500 (0:00:00.060) 0:00:57.253 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:45:48 -0500 (0:00:00.039) 0:00:57.293 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:45:48 -0500 (0:00:00.028) 0:00:57.321 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:45:48 -0500 (0:00:00.117) 0:00:57.439 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:45:48 -0500 (0:00:00.039) 0:00:57.479 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 06 January 2025 13:45:48 -0500 (0:00:00.110) 0:00:57.589 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736189126.392973, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1736189138.0469365, "dev": 194, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1736189138.0469365, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 220, "uid": 
2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 06 January 2025 13:45:48 -0500 (0:00:00.390) 0:00:57.980 ******** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Mon 2025-01-06 13:45:38 EST", "ActiveEnterTimestampMonotonic": "597179829", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "podman-user-wait-network-online.service app.slice run-user-2223.mount quadlet-pod-pod-pod.service basic.target -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-01-06 13:45:37 EST", "AssertTimestampMonotonic": "597056802", "Before": "default.target shutdown.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "92345000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-01-06 13:45:37 EST", "ConditionTimestampMonotonic": "597056797", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-2223.slice/user@2223.service/app.slice/quadlet-pod-container.service", "ControlGroupId": "12243", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "37880", "ExecMainStartTimestamp": "Mon 2025-01-06 13:45:38 EST", "ExecMainStartTimestampMonotonic": "597179466", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace 
--rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Mon 2025-01-06 13:45:37 EST] ; stop_time=[n/a] ; pid=37870 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Mon 2025-01-06 13:45:37 EST] ; stop_time=[n/a] ; pid=37870 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Mon 2025-01-06 13:45:37 EST", "InactiveExitTimestampMonotonic": "597071487", "InvocationID": "d1c0f349cdf640f99820d4eb878b3e82", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": 
"13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "37880", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3660025856", "MemoryCurrent": "897024", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "20795392", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": 
"[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Mon 2025-01-06 13:45:38 EST", "StateChangeTimestampMonotonic": "597179829", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service default.target", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Monday 06 January 2025 13:45:59 -0500 (0:00:11.007) 0:01:08.987 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736189137.3039389, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1736189136.682941, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 469762254, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736189136.4059417, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 2223, "version": "1174262324", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Monday 06 January 2025 13:46:00 -0500 (0:00:00.395) 0:01:09.383 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 06 January 2025 13:46:00 -0500 (0:00:00.089) 0:01:09.473 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 06 January 2025 13:46:00 -0500 (0:00:00.508) 0:01:09.982 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Monday 06 January 2025 13:46:00 -0500 (0:00:00.048) 0:01:10.030 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Monday 06 January 2025 13:46:00 -0500 (0:00:00.033) 0:01:10.064 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Monday 06 January 2025 13:46:00 -0500 (0:00:00.031) 0:01:10.095 ******** changed: [managed-node2] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Monday 06 January 2025 13:46:01 -0500 (0:00:00.412) 0:01:10.508 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Monday 06 January 2025 13:46:01 -0500 (0:00:00.663) 0:01:11.171 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Monday 06 January 2025 13:46:02 -0500 (0:00:00.532) 0:01:11.703 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Monday 06 January 2025 13:46:02 -0500 (0:00:00.046) 0:01:11.750 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Monday 06 January 2025 13:46:02 -0500 (0:00:00.037) 0:01:11.788 ******** 
changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.136346", "end": "2025-01-06 13:46:03.081986", "rc": 0, "start": "2025-01-06 13:46:02.945640" } STDOUT: 2872b97978749fa1f5d8b6f72950f38cb56d57ca46629686b8ae817be901b1fd 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Monday 06 January 2025 13:46:03 -0500 (0:00:00.583) 0:01:12.372 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:46:03 -0500 (0:00:00.055) 0:01:12.427 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:46:03 -0500 (0:00:00.105) 0:01:12.533 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:46:03 -0500 (0:00:00.058) 0:01:12.591 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Monday 06 January 2025 13:46:03 -0500 (0:00:00.044) 0:01:12.635 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.046590", "end": "2025-01-06 13:46:03.840003", "rc": 0, "start": "2025-01-06 13:46:03.793413" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Monday 06 January 2025 13:46:03 -0500 (0:00:00.494) 0:01:13.130 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.046558", "end": "2025-01-06 13:46:04.325426", "rc": 0, "start": "2025-01-06 13:46:04.278868" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Monday 06 January 2025 13:46:04 -0500 (0:00:00.499) 0:01:13.629 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.045881", "end": "2025-01-06 13:46:04.825279", "rc": 0, "start": "2025-01-06 13:46:04.779398" } TASK [fedora.linux_system_roles.podman : For testing and debugging - 
networks] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Monday 06 January 2025 13:46:04 -0500 (0:00:00.485) 0:01:14.114 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.047819", "end": "2025-01-06 13:46:05.311283", "rc": 0, "start": "2025-01-06 13:46:05.263464" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Monday 06 January 2025 13:46:05 -0500 (0:00:00.487) 0:01:14.602 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Monday 06 January 2025 13:46:05 -0500 (0:00:00.530) 0:01:15.132 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Monday 06 January 2025 13:46:06 -0500 (0:00:00.503) 0:01:15.636 ******** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": 
"cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": 
"dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": 
"systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": 
"enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": 
"systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": 
"systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": 
"systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:46:09 -0500 (0:00:03.188) 0:01:18.825 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:46:09 -0500 (0:00:00.034) 0:01:18.860 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:46:09 -0500 (0:00:00.044) 0:01:18.904 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:46:09 -0500 (0:00:00.043) 0:01:18.948 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:46:09 -0500 (0:00:00.034) 0:01:18.983 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:46:09 -0500 (0:00:00.050) 0:01:19.033 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:46:09 -0500 (0:00:00.062) 0:01:19.096 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:46:09 -0500 (0:00:00.039) 0:01:19.135 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:46:10 -0500 (0:00:00.135) 0:01:19.271 ******** ok: [managed-node2] => {} MSG: item { "Pod": { "PodName": "quadlet-pod" }, "name": "quadlet-pod-pod", "state": "absent", "type": "pod" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:46:10 -0500 (0:00:00.052) 0:01:19.323 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:46:10 -0500 (0:00:00.068) 0:01:19.392 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:46:10 -0500 (0:00:00.427) 0:01:19.820 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003900", "end": "2025-01-06 13:46:10.923303", "rc": 0, "start": "2025-01-06 13:46:10.919403" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:46:11 -0500 (0:00:00.415) 0:01:20.235 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005330", "end": "2025-01-06 13:46:11.354515", "rc": 0, "start": "2025-01-06 13:46:11.349185" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:46:11 -0500 (0:00:00.410) 0:01:20.645 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:46:11 -0500 (0:00:00.084) 0:01:20.730 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:46:11 -0500 (0:00:00.071) 0:01:20.801 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid 
info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:46:11 -0500 (0:00:00.043) 0:01:20.845 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:46:11 -0500 (0:00:00.040) 0:01:20.886 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:46:11 -0500 (0:00:00.037) 0:01:20.923 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:46:11 -0500 (0:00:00.036) 0:01:20.959 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:46:11 -0500 (0:00:00.059) 0:01:21.019 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:46:11 -0500 (0:00:00.041) 0:01:21.060 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:46:11 -0500 (0:00:00.051) 0:01:21.112 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:46:12 -0500 (0:00:00.139) 0:01:21.251 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to 
the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:46:12 -0500 (0:00:00.128) 0:01:21.380 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 06 January 2025 13:46:12 -0500 (0:00:00.095) 0:01:21.476 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736189126.392973, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1736189159.7588677, "dev": 194, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1736189159.7588677, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 06 January 2025 13:46:12 -0500 (0:00:00.405) 0:01:21.881 ******** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "app.slice run-user-2223.mount podman-user-wait-network-online.service -.mount basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": 
"inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", 
"IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3685912576", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice 
basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Monday 06 January 2025 13:46:13 -0500 (0:00:00.876) 0:01:22.758 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736189128.5169663, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 1736189127.8819683, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 255853414, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736189127.616969, "nlink": 1, "path": 
"/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 70, "uid": 2223, "version": "809687910", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Monday 06 January 2025 13:46:14 -0500 (0:00:00.490) 0:01:23.248 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 06 January 2025 13:46:14 -0500 (0:00:00.109) 0:01:23.357 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 06 January 2025 13:46:14 -0500 (0:00:00.442) 0:01:23.800 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Monday 06 January 2025 13:46:14 -0500 (0:00:00.108) 0:01:23.909 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Monday 06 January 2025 13:46:14 -0500 (0:00:00.075) 0:01:23.985 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Monday 06 January 2025 13:46:14 -0500 (0:00:00.066) 0:01:24.052 ******** changed: [managed-node2] => { "changed": true, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Monday 06 January 2025 13:46:15 -0500 (0:00:00.437) 0:01:24.489 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Monday 06 January 2025 13:46:16 -0500 (0:00:00.796) 0:01:25.286 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 
'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Monday 06 January 2025 13:46:16 -0500 (0:00:00.576) 0:01:25.862 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Monday 06 January 2025 13:46:16 -0500 (0:00:00.091) 0:01:25.954 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Monday 06 January 2025 13:46:16 -0500 (0:00:00.064) 0:01:26.019 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.043792", "end": "2025-01-06 13:46:17.253396", "rc": 0, "start": "2025-01-06 13:46:17.209604" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Monday 06 January 2025 13:46:17 -0500 (0:00:00.673) 0:01:26.692 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:46:17 -0500 (0:00:00.106) 0:01:26.799 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:46:17 -0500 (0:00:00.062) 0:01:26.861 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:46:17 -0500 (0:00:00.051) 0:01:26.913 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Monday 06 January 2025 13:46:17 -0500 (0:00:00.061) 0:01:26.974 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.043397", "end": "2025-01-06 13:46:18.201739", "rc": 0, "start": "2025-01-06 13:46:18.158342" } TASK 
[fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Monday 06 January 2025 13:46:18 -0500 (0:00:00.546) 0:01:27.521 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.041963", "end": "2025-01-06 13:46:18.770370", "rc": 0, "start": "2025-01-06 13:46:18.728407" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Monday 06 January 2025 13:46:18 -0500 (0:00:00.565) 0:01:28.087 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.043234", "end": "2025-01-06 13:46:19.364544", "rc": 0, "start": "2025-01-06 13:46:19.321310" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Monday 06 January 2025 13:46:19 -0500 (0:00:00.586) 0:01:28.674 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.047171", "end": "2025-01-06 13:46:19.898517", "rc": 0, "start": "2025-01-06 13:46:19.851346" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Monday 06 January 2025 13:46:20 -0500 (0:00:00.564) 0:01:29.238 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Monday 06 January 2025 13:46:20 -0500 (0:00:00.543) 0:01:29.782 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Monday 06 January 2025 13:46:21 -0500 (0:00:00.544) 0:01:30.326 ******** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": 
"not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": 
"ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": 
"disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "running", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { 
"name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": 
"systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": 
"systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@2223.service": { "name": "user-runtime-dir@2223.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@2223.service": { "name": "user@2223.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] 
*********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:46:23 -0500 (0:00:02.012) 0:01:32.338 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Monday 06 January 2025 13:46:23 -0500 (0:00:00.115) 0:01:32.453 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node2 => (item=user_quadlet_pod) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Monday 06 January 2025 13:46:23 -0500 (0:00:00.242) 0:01:32.696 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Monday 06 January 2025 13:46:23 -0500 (0:00:00.184) 0:01:32.880 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Monday 06 January 2025 13:46:23 -0500 (0:00:00.046) 0:01:32.927 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736189126.392973, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1736189159.7588677, "dev": 194, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1736189159.7588677, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Monday 06 January 2025 13:46:24 -0500 (0:00:00.416) 0:01:33.343 ******** ok: [managed-node2] => { "changed": false, "containers": [] } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Monday 06 January 2025 13:46:24 -0500 (0:00:00.715) 0:01:34.058 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-q" ], "delta": "0:00:00.045907", "end": "2025-01-06 13:46:25.285249", "rc": 0, "start": "2025-01-06 
13:46:25.239342" } STDOUT: podman TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Monday 06 January 2025 13:46:25 -0500 (0:00:00.522) 0:01:34.581 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "secret", "ls", "-n", "-q" ], "delta": "0:00:00.044294", "end": "2025-01-06 13:46:25.774144", "rc": 0, "start": "2025-01-06 13:46:25.729850" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Monday 06 January 2025 13:46:25 -0500 (0:00:00.516) 0:01:35.098 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "loginctl", "disable-linger", "user_quadlet_pod" ], "delta": "0:00:00.007226", "end": "2025-01-06 13:46:26.219679", "rc": 0, "start": "2025-01-06 13:46:26.212453" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Monday 06 January 2025 13:46:26 -0500 (0:00:00.422) 0:01:35.520 ******** FAILED - RETRYING: [managed-node2]: Wait for user session to exit closing state (3 retries left). FAILED - RETRYING: [managed-node2]: Wait for user session to exit closing state (2 retries left). FAILED - RETRYING: [managed-node2]: Wait for user session to exit closing state (1 retries left). fatal: [managed-node2]: FAILED! => { "attempts": 3, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.006806", "end": "2025-01-06 13:46:42.676144", "rc": 0, "start": "2025-01-06 13:46:42.669338" } STDOUT: closing ...ignoring TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Monday 06 January 2025 13:46:42 -0500 (0:00:16.467) 0:01:51.988 ******** changed: [managed-node2] => { "changed": true, "name": "systemd-logind", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-01-06 13:36:03 EST", "ActiveEnterTimestampMonotonic": "23683046", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket user.slice dbus.socket sysinit.target systemd-remount-fs.service system.slice basic.target -.mount modprobe@drm.service nss-user-lookup.target systemd-tmpfiles-setup.service tmp.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-01-06 13:36:03 EST", "AssertTimestampMonotonic": "23621856", "Before": "multi-user.target session-5.scope shutdown.target session-4.scope", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.freedesktop.login1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "320910000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanClean": "runtime state fdstore", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search 
cap_fowner cap_linux_immutable cap_sys_admin cap_sys_tty_config cap_audit_control cap_mac_admin", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-01-06 13:36:03 EST", "ConditionTimestampMonotonic": "23621848", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/systemd-logind.service", "ControlGroupId": "2899", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "User Login Management", "DeviceAllow": "block-* r", "DevicePolicy": "auto", "Documentation": "\"man:sd-login(3)\" \"man:systemd-logind.service(8)\" \"man:logind.conf(5)\" \"man:org.freedesktop.login1(5)\"", "DropInPaths": "/usr/lib/systemd/system/systemd-logind.service.d/10-grub2-logind-service.conf", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "SYSTEMD_REBOOT_TO_BOOT_LOADER_MENU=true", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Mon 2025-01-06 13:36:03 EST", "ExecMainHandoffTimestampMonotonic": "23669188", "ExecMainPID": "662", "ExecMainStartTimestamp": "Mon 2025-01-06 13:36:03 EST", "ExecMainStartTimestampMonotonic": "23630956", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/lib/systemd/systemd-logind ; argv[]=/usr/lib/systemd/systemd-logind ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "768", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/systemd-logind.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPAddressDeny": "0.0.0.0/0 ::/0", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "systemd-logind.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Mon 2025-01-06 13:36:03 EST", "InactiveExitTimestampMonotonic": "23631247", "InvocationID": "56ce2429d08d429d90f9d634cb6a68fa", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", 
"LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "524288", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "662", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3136434176", "MemoryCurrent": "17510400", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "18034688", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "5", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "systemd-logind.service dbus-org.freedesktop.login1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "yes", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "yes", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "yes", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "strict", "ReadWritePaths": "/etc /run", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/systemd/sessions /run/systemd/shutdown /run/systemd/users /run/systemd/seats /run/systemd/inhibit /var/lib/systemd/linger", "Restart": "always", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "0", "RestartUSecNext": "0", "RestrictAddressFamilies": "AF_NETLINK AF_UNIX", "RestrictNamespaces": "yes", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectory": "systemd/inhibit systemd/seats systemd/sessions systemd/shutdown systemd/users", 
"RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "yes", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Mon 2025-01-06 13:36:03 EST", "StateChangeTimestampMonotonic": "23683046", "StateDirectory": "systemd/linger", "StateDirectoryMode": "0755", "StatusErrno": "0", "StatusText": "Processing requests...", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "1", "SystemCallFilter": "_llseek _newselect accept accept4 access add_key alarm arch_prctl arm_fadvise64_64 bind brk cacheflush capget capset chdir chmod chown chown32 clock_getres clock_getres_time64 clock_gettime clock_gettime64 clock_nanosleep clock_nanosleep_time64 clone clone3 close close_range connect copy_file_range creat dup dup2 dup3 epoll_create epoll_create1 epoll_ctl epoll_ctl_old epoll_pwait epoll_pwait2 epoll_wait epoll_wait_old eventfd eventfd2 execve execveat exit exit_group faccessat faccessat2 fadvise64 fadvise64_64 fallocate fchdir fchmod fchmodat fchown fchown32 fchownat fcntl fcntl64 fdatasync fgetxattr flistxattr flock fork fremovexattr fsetxattr fstat fstat64 fstatat64 fstatfs fstatfs64 fsync ftruncate ftruncate64 futex futex_time64 futimesat get_mempolicy get_robust_list get_thread_area getcpu getcwd getdents getdents64 getegid getegid32 geteuid geteuid32 getgid getgid32 getgroups getgroups32 getitimer getpeername getpgid getpgrp getpid getppid getpriority getrandom getresgid getresgid32 getresuid getresuid32 getrlimit getrusage getsid getsockname getsockopt gettid gettimeofday getuid getuid32 getxattr inotify_add_watch inotify_init inotify_init1 inotify_rm_watch io_cancel io_destroy io_getevents io_pgetevents io_pgetevents_time64 io_setup io_submit io_uring_enter io_uring_register io_uring_setup ioctl ioprio_get ioprio_set ipc kcmp keyctl kill lchown lchown32 lgetxattr link linkat listen listxattr llistxattr lremovexattr lseek lsetxattr lstat lstat64 madvise mbind membarrier memfd_create migrate_pages mkdir mkdirat mknod mknodat mlock mlock2 mlockall mmap mmap2 move_pages mprotect mq_getsetattr mq_notify mq_open mq_timedreceive mq_timedreceive_time64 mq_timedsend mq_timedsend_time64 mq_unlink mremap msgctl msgget msgrcv msgsnd msync munlock munlockall munmap name_to_handle_at nanosleep newfstatat nice oldfstat oldlstat oldolduname oldstat olduname open openat openat2 pause personality pidfd_open pidfd_send_signal pipe pipe2 poll ppoll ppoll_time64 prctl pread64 preadv preadv2 prlimit64 process_madvise process_vm_readv process_vm_writev pselect6 pselect6_time64 pwrite64 pwritev pwritev2 read readahead readdir readlink readlinkat readv recv recvfrom recvmmsg recvmmsg_time64 recvmsg remap_file_pages removexattr rename renameat renameat2 request_key restart_syscall 
riscv_flush_icache rmdir rseq rt_sigaction rt_sigpending rt_sigprocmask rt_sigqueueinfo rt_sigreturn rt_sigsuspend rt_sigtimedwait rt_sigtimedwait_time64 rt_tgsigqueueinfo sched_get_priority_max sched_get_priority_min sched_getaffinity sched_getattr sched_getparam sched_getscheduler sched_rr_get_interval sched_rr_get_interval_time64 sched_setaffinity sched_setattr sched_setparam sched_setscheduler sched_yield select semctl semget semop semtimedop semtimedop_time64 send sendfile sendfile64 sendmmsg sendmsg sendto set_mempolicy set_robust_list set_thread_area set_tid_address set_tls setfsgid setfsgid32 setfsuid setfsuid32 setgid setgid32 setgroups setgroups32 setitimer setns setpgid setpriority setregid setregid32 setresgid setresgid32 setresuid setresuid32 setreuid setreuid32 setrlimit setsid setsockopt setuid setuid32 setxattr shmat shmctl shmdt shmget shutdown sigaction sigaltstack signal signalfd signalfd4 sigpending sigprocmask sigreturn sigsuspend socket socketcall socketpair splice stat stat64 statfs statfs64 statx swapcontext symlink symlinkat sync sync_file_range sync_file_range2 syncfs sysinfo tee tgkill time timer_create timer_delete timer_getoverrun timer_gettime timer_gettime64 timer_settime timer_settime64 timerfd_create timerfd_gettime timerfd_gettime64 timerfd_settime timerfd_settime64 times tkill truncate truncate64 ugetrlimit umask uname unlink unlinkat unshare userfaultfd utime utimensat utimensat_time64 utimes vfork vmsplice wait4 waitid waitpid write writev", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify-reload", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "static", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "user.slice modprobe@drm.service -.mount dbus.socket", "WantsMountsFor": "/tmp /var/tmp", "WatchdogSignal": "6", "WatchdogTimestamp": "Mon 2025-01-06 13:46:18 EST", "WatchdogTimestampMonotonic": "637631918", "WatchdogUSec": "3min" } } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Monday 06 January 2025 13:46:43 -0500 (0:00:00.579) 0:01:52.567 ******** FAILED - RETRYING: [managed-node2]: Wait for user session to exit closing state (3 retries left). FAILED - RETRYING: [managed-node2]: Wait for user session to exit closing state (2 retries left). 
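The retries above come from a polling loop: the role repeatedly runs loginctl show-user --value -p State for the rootless user until the session is no longer in the "closing" state, and tolerates the non-zero exit once the user has fully logged out (as the rc=1 result below shows). A minimal, illustrative sketch of such a poll, assuming ansible.builtin.command and arbitrary retry/delay values rather than the role's exact task:

    - hosts: all
      tasks:
        - name: Wait for user session to exit closing state
          ansible.builtin.command: loginctl show-user --value -p State user_quadlet_pod
          register: __user_state
          changed_when: false
          # rc 1 ("User ... is not logged in or lingering") is acceptable once the session is gone
          failed_when: false
          # keep polling while the session still reports "closing"
          until: __user_state.stdout != "closing"
          retries: 3
          delay: 5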
ok: [managed-node2] => { "attempts": 3, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_pod" ], "delta": "0:00:00.006220", "end": "2025-01-06 13:46:55.757946", "failed_when_result": false, "rc": 1, "start": "2025-01-06 13:46:55.751726" } STDERR: Failed to get user: User ID 2223 is not logged in or lingering MSG: non-zero return code TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Monday 06 January 2025 13:46:55 -0500 (0:00:12.480) 0:02:05.047 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__user_state is failed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Monday 06 January 2025 13:46:55 -0500 (0:00:00.034) 0:02:05.082 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Monday 06 January 2025 13:46:55 -0500 (0:00:00.031) 0:02:05.113 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:109 Monday 06 January 2025 13:46:55 -0500 (0:00:00.070) 0:02:05.184 ******** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Ensure no linger] ******************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:120 Monday 06 January 2025 13:46:56 -0500 (0:00:00.092) 0:02:05.276 ******** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:159 Monday 06 January 2025 13:46:56 -0500 (0:00:00.414) 0:02:05.691 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 06 January 2025 13:46:56 -0500 (0:00:00.194) 0:02:05.885 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 06 January 2025 13:46:56 -0500 (0:00:00.095) 0:02:05.981 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 06 January 2025 13:46:56 -0500 (0:00:00.055) 0:02:06.036 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 06 January 2025 13:46:56 -0500 (0:00:00.039) 0:02:06.076 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 06 January 2025 13:46:56 -0500 (0:00:00.040) 0:02:06.117 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 06 January 2025 13:46:56 -0500 (0:00:00.038) 0:02:06.156 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 06 January 2025 13:46:56 -0500 (0:00:00.033) 0:02:06.189 ******** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 06 January 2025 13:46:57 -0500 (0:00:00.076) 0:02:06.265 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to 
the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 06 January 2025 13:46:57 -0500 (0:00:00.788) 0:02:07.054 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 06 January 2025 13:46:57 -0500 (0:00:00.041) 0:02:07.095 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 06 January 2025 13:46:57 -0500 (0:00:00.043) 0:02:07.139 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 06 January 2025 13:46:57 -0500 (0:00:00.033) 0:02:07.173 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 06 January 2025 13:46:57 -0500 (0:00:00.032) 0:02:07.206 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 06 January 2025 13:46:58 -0500 (0:00:00.075) 0:02:07.281 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028694", "end": "2025-01-06 13:46:58.383557", "rc": 0, "start": "2025-01-06 13:46:58.354863" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 06 January 2025 13:46:58 -0500 (0:00:00.410) 0:02:07.692 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 06 January 2025 13:46:58 -0500 (0:00:00.057) 0:02:07.750 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 06 January 2025 13:46:58 -0500 (0:00:00.037) 0:02:07.787 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 06 January 2025 13:46:58 -0500 (0:00:00.046) 0:02:07.834 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 06 January 2025 13:46:58 -0500 (0:00:00.051) 0:02:07.885 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 06 January 2025 13:46:58 -0500 (0:00:00.063) 0:02:07.949 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 06 January 2025 13:46:58 -0500 (0:00:00.060) 0:02:08.009 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:46:58 -0500 (0:00:00.064) 0:02:08.074 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:46:58 -0500 (0:00:00.038) 0:02:08.113 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:46:58 -0500 (0:00:00.039) 
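The skipped version gates above compare the detected podman_version (5.3.1 here, set from the earlier podman --version task) against a minimum using Jinja2's version test; only when that test is true does the role report the problem and stop processing the host. A minimal sketch of such a gate, assuming ansible.builtin.fail (the role itself pairs its message with a meta: end_host, as the log shows), with podman_version supplied inline so the example is self-contained:

    - hosts: all
      vars:
        podman_version: "5.3.1"   # in the role this comes from `podman --version`
      tasks:
        - name: Podman package version must be 5.0 or later for Pod quadlets
          ansible.builtin.fail:
            msg: Pod quadlets require podman 5.0 or later; found {{ podman_version }}
          when: podman_version is version("5.0", "<")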
0:02:08.152 ******** ok: [managed-node2] => {} MSG: item {} TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:46:58 -0500 (0:00:00.035) 0:02:08.188 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:46:59 -0500 (0:00:00.045) 0:02:08.233 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:46:59 -0500 (0:00:00.379) 0:02:08.613 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003995", "end": "2025-01-06 13:46:59.694122", "rc": 0, "start": "2025-01-06 13:46:59.690127" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:46:59 -0500 (0:00:00.438) 0:02:09.051 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005291", "end": "2025-01-06 13:47:00.168066", "rc": 0, "start": "2025-01-06 13:47:00.162775" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:47:00 -0500 (0:00:00.426) 0:02:09.477 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:47:00 -0500 (0:00:00.082) 0:02:09.560 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:47:00 -0500 (0:00:00.046) 0:02:09.606 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:47:00 -0500 (0:00:00.044) 0:02:09.651 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:47:00 -0500 (0:00:00.043) 0:02:09.695 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:47:00 -0500 (0:00:00.036) 0:02:09.731 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 06 January 2025 13:47:00 -0500 (0:00:00.035) 0:02:09.767 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Monday 06 January 2025 13:47:00 -0500 (0:00:00.043) 0:02:09.810 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 06 January 2025 13:47:00 -0500 (0:00:00.066) 0:02:09.876 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 06 
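The per-user drop-in paths set above are only written when the matching role variables are non-empty; in this run podman_containers_conf, podman_registries_conf, podman_storage_conf, and podman_policy_json are all empty, which is why the handling tasks here and just below skip. An illustrative sketch of how those variables might be populated (the specific keys and values are assumptions based on containers.conf/registries.conf syntax, not values from this test):

    - hosts: all
      vars:
        podman_run_as_user: user_quadlet_pod
        podman_containers_conf:
          containers:
            log_driver: journald          # illustrative setting
        podman_registries_conf:
          unqualified-search-registries:
            - quay.io                     # illustrative setting
      roles:
        - fedora.linux_system_roles.podman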
January 2025 13:47:00 -0500 (0:00:00.042) 0:02:09.919 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Monday 06 January 2025 13:47:00 -0500 (0:00:00.056) 0:02:09.976 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 06 January 2025 13:47:00 -0500 (0:00:00.073) 0:02:10.049 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 06 January 2025 13:47:00 -0500 (0:00:00.040) 0:02:10.089 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Monday 06 January 2025 13:47:00 -0500 (0:00:00.084) 0:02:10.174 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Monday 06 January 2025 13:47:01 -0500 (0:00:00.064) 0:02:10.239 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Monday 06 January 2025 13:47:01 -0500 (0:00:00.035) 0:02:10.274 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Monday 06 January 2025 13:47:01 -0500 (0:00:00.037) 0:02:10.311 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Monday 06 January 2025 13:47:01 -0500 (0:00:00.100) 0:02:10.412 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Monday 06 January 2025 13:47:01 -0500 (0:00:00.040) 0:02:10.453 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Monday 06 January 2025 13:47:01 -0500 (0:00:00.040) 0:02:10.494 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Monday 06 January 2025 13:47:01 -0500 (0:00:00.039) 0:02:10.533 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Monday 06 January 2025 13:47:01 -0500 (0:00:00.034) 0:02:10.567 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Monday 06 January 2025 13:47:01 -0500 (0:00:00.035) 0:02:10.602 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Monday 06 January 2025 13:47:01 -0500 (0:00:00.041) 0:02:10.644 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Monday 06 January 2025 13:47:01 -0500 (0:00:00.034) 0:02:10.678 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Monday 06 January 2025 13:47:01 -0500 (0:00:00.035) 0:02:10.713 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Monday 06 January 2025 13:47:01 -0500 (0:00:00.046) 0:02:10.760 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Monday 06 January 2025 13:47:01 -0500 (0:00:00.049) 0:02:10.809 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Monday 06 January 2025 13:47:01 -0500 (0:00:00.112) 0:02:10.922 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:47:01 -0500 (0:00:00.155) 0:02:11.077 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:47:01 -0500 (0:00:00.073) 0:02:11.151 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:47:02 -0500 (0:00:00.071) 0:02:11.222 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:47:02 -0500 (0:00:00.059) 0:02:11.281 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:47:02 -0500 (0:00:00.082) 0:02:11.364 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:47:02 -0500 (0:00:00.106) 0:02:11.471 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:47:02 -0500 (0:00:00.062) 0:02:11.534 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:47:02 -0500 (0:00:00.065) 0:02:11.600 ******** ok: [managed-node2] => {} MSG: item { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" }, "name": "quadlet-pod-container", "state": "absent", "type": "container" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:47:02 -0500 (0:00:00.076) 0:02:11.676 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:47:02 -0500 (0:00:00.084) 0:02:11.760 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:47:02 -0500 (0:00:00.408) 0:02:12.168 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004059", "end": "2025-01-06 13:47:03.287469", "rc": 0, "start": "2025-01-06 13:47:03.283410" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:47:03 -0500 (0:00:00.407) 0:02:12.576 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005636", "end": "2025-01-06 13:47:03.680666", "rc": 0, "start": "2025-01-06 13:47:03.675030" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:47:03 -0500 (0:00:00.405) 0:02:12.982 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:47:03 -0500 (0:00:00.098) 0:02:13.080 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:47:03 -0500 (0:00:00.036) 0:02:13.117 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:47:03 -0500 (0:00:00.037) 0:02:13.154 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:47:03 -0500 (0:00:00.041) 0:02:13.196 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:47:04 -0500 (0:00:00.040) 0:02:13.236 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:47:04 -0500 (0:00:00.042) 0:02:13.278 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:47:04 -0500 (0:00:00.065) 0:02:13.343 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:47:04 -0500 (0:00:00.036) 0:02:13.380 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:47:04 -0500 (0:00:00.033) 0:02:13.414 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:47:04 -0500 (0:00:00.078) 0:02:13.492 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:47:04 -0500 (0:00:00.049) 0:02:13.541 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 06 January 2025 13:47:04 -0500 (0:00:00.110) 0:02:13.652 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 
06 January 2025 13:47:04 -0500 (0:00:00.384) 0:02:14.037 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Monday 06 January 2025 13:47:04 -0500 (0:00:00.038) 0:02:14.075 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Monday 06 January 2025 13:47:05 -0500 (0:00:00.371) 0:02:14.446 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Monday 06 January 2025 13:47:05 -0500 (0:00:00.033) 0:02:14.479 ******** ok: [managed-node2] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Monday 06 January 2025 13:47:05 -0500 (0:00:00.458) 0:02:14.937 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Monday 06 January 2025 13:47:05 -0500 (0:00:00.061) 0:02:14.998 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Monday 06 January 2025 13:47:05 -0500 (0:00:00.059) 0:02:15.058 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Monday 06 January 2025 13:47:05 -0500 (0:00:00.077) 0:02:15.135 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Monday 06 January 2025 13:47:05 -0500 (0:00:00.060) 0:02:15.195 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Monday 06 January 2025 13:47:06 -0500 (0:00:00.060) 0:02:15.256 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:47:06 -0500 (0:00:00.101) 0:02:15.357 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:47:06 -0500 (0:00:00.062) 0:02:15.419 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:47:06 -0500 (0:00:00.064) 0:02:15.483 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Monday 06 January 2025 13:47:06 -0500 (0:00:00.054) 0:02:15.538 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Monday 06 January 2025 13:47:06 -0500 (0:00:00.043) 0:02:15.582 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Monday 06 January 2025 13:47:06 -0500 (0:00:00.046) 0:02:15.628 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Monday 06 January 2025 13:47:06 -0500 (0:00:00.040) 0:02:15.669 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task 
path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Monday 06 January 2025 13:47:06 -0500 (0:00:00.042) 0:02:15.711 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Monday 06 January 2025 13:47:06 -0500 (0:00:00.037) 0:02:15.748 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Monday 06 January 2025 13:47:06 -0500 (0:00:00.036) 0:02:15.785 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:47:06 -0500 (0:00:00.104) 0:02:15.890 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:47:06 -0500 (0:00:00.053) 0:02:15.943 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:47:06 -0500 (0:00:00.047) 0:02:15.991 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:47:06 -0500 (0:00:00.054) 0:02:16.045 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:47:06 -0500 (0:00:00.038) 0:02:16.084 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK 
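The two quadlet specs being cleaned up here (the quadlet-pod-container container shown earlier and the quadlet-pod pod above) are items of the role's podman_quadlet_specs input; each top-level section (Container, Pod, Install) maps to the section of the same name in the unit file generated under ~/.config/containers/systemd/ (e.g. .../quadlet-pod-container.container, as seen above). A sketch of how this cleanup pass could be expressed as a playbook, using only variable names documented for the role; the container entry is listed first so it is removed before the pod it joins:

    - hosts: all
      vars:
        podman_run_as_user: user_quadlet_pod
        podman_quadlet_specs:
          - name: quadlet-pod-container
            type: container
            state: absent
            Install:
              WantedBy: default.target
            Container:
              ContainerName: quadlet-pod-container
              Exec: /bin/busybox-extras httpd -f -p 80
              Image: quay.io/libpod/testimage:20210610
              Pod: quadlet-pod-pod.pod    # references the pod unit below by its quadlet file name
          - name: quadlet-pod-pod
            type: pod
            state: absent
            Pod:
              PodName: quadlet-pod
      roles:
        - fedora.linux_system_roles.podman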
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:47:06 -0500 (0:00:00.049) 0:02:16.133 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:47:06 -0500 (0:00:00.065) 0:02:16.199 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:47:07 -0500 (0:00:00.039) 0:02:16.238 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:47:07 -0500 (0:00:00.046) 0:02:16.285 ******** ok: [managed-node2] => {} MSG: item { "Pod": { "PodName": "quadlet-pod" }, "name": "quadlet-pod-pod", "state": "absent", "type": "pod" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:47:07 -0500 (0:00:00.065) 0:02:16.350 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:47:07 -0500 (0:00:00.050) 0:02:16.400 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:47:07 -0500 (0:00:00.407) 0:02:16.808 
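
Editor's note: the next two results query the subordinate UID/GID ranges of the rootless user with getsubids. A rough standalone equivalent of those checks is sketched below (task names and register names are illustrative only); the expected output format is "INDEX: USER START COUNT", e.g. "0: user_quadlet_pod 589824 65536".

# Sketch: manual check of subordinate UID/GID ranges for the rootless user.
- name: Check subuids for user_quadlet_pod (illustration)
  ansible.builtin.command: getsubids user_quadlet_pod
  register: subuids            # hypothetical register name
  changed_when: false

- name: Check subgids for user_quadlet_pod (illustration)
  ansible.builtin.command: getsubids -g user_quadlet_pod
  register: subgids
  changed_when: false

When getsubids is not installed, the role instead reads /etc/subuid and /etc/subgid directly, which is why the "Get subuid file" / "Get subgid file" tasks are skipped in this run.
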
******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004467", "end": "2025-01-06 13:47:07.919049", "rc": 0, "start": "2025-01-06 13:47:07.914582" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:47:08 -0500 (0:00:00.427) 0:02:17.235 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.004901", "end": "2025-01-06 13:47:08.360211", "rc": 0, "start": "2025-01-06 13:47:08.355310" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:47:08 -0500 (0:00:00.413) 0:02:17.649 ******** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:47:08 -0500 (0:00:00.050) 0:02:17.700 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:47:08 -0500 (0:00:00.097) 0:02:17.797 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:47:08 -0500 (0:00:00.059) 0:02:17.857 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:47:08 -0500 (0:00:00.061) 0:02:17.918 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:47:08 -0500 (0:00:00.061) 0:02:17.979 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:47:08 -0500 (0:00:00.058) 0:02:18.038 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:47:08 -0500 (0:00:00.073) 0:02:18.112 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:47:08 -0500 (0:00:00.042) 0:02:18.155 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:47:08 -0500 (0:00:00.037) 0:02:18.193 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:47:09 -0500 (0:00:00.080) 0:02:18.273 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:47:09 -0500 (0:00:00.042) 0:02:18.316 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 06 January 2025 13:47:09 -0500 (0:00:00.078) 0:02:18.395 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 06 January 2025 13:47:09 -0500 (0:00:00.391) 0:02:18.787 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK 
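
Editor's note: parts 3-5 above resolve where the rootless quadlet lives and which unit it generates: a quadlet named quadlet-pod-pod of type pod maps to ~/.config/containers/systemd/quadlet-pod-pod.pod, and quadlet appends a -pod suffix when generating the service, hence quadlet-pod-pod-pod.service in the user scope. Assuming the role simply renders the spec keys into an INI-style quadlet unit (the rendering itself is not shown in this log), writing that file by hand could look roughly like:

# Sketch only: hand-written equivalent of the rootless pod quadlet the role manages.
- name: Install quadlet-pod-pod.pod for user_quadlet_pod (illustration)
  ansible.builtin.copy:
    dest: /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod
    owner: user_quadlet_pod
    group: "2223"              # __podman_group resolved above
    mode: "0644"
    content: |
      [Pod]
      PodName=quadlet-pod

In this pass __podman_state is "absent", so the role removes the file instead of creating it and only reloads the user systemd instance when a file was actually deleted.
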
[fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Monday 06 January 2025 13:47:09 -0500 (0:00:00.036) 0:02:18.824 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Monday 06 January 2025 13:47:09 -0500 (0:00:00.363) 0:02:19.187 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Monday 06 January 2025 13:47:10 -0500 (0:00:00.037) 0:02:19.225 ******** ok: [managed-node2] => { "changed": false, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Monday 06 January 2025 13:47:10 -0500 (0:00:00.379) 0:02:19.605 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_file_removed is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Monday 06 January 2025 13:47:10 -0500 (0:00:00.087) 0:02:19.692 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Monday 06 January 2025 13:47:10 -0500 (0:00:00.036) 0:02:19.729 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Monday 06 January 2025 13:47:10 -0500 (0:00:00.052) 0:02:19.781 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Monday 06 January 2025 13:47:10 -0500 (0:00:00.035) 0:02:19.817 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_rootless or __podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Monday 
06 January 2025 13:47:10 -0500 (0:00:00.040) 0:02:19.857 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:47:10 -0500 (0:00:00.090) 0:02:19.948 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:47:10 -0500 (0:00:00.040) 0:02:19.988 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') != 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:47:10 -0500 (0:00:00.041) 0:02:20.029 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_pod" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Monday 06 January 2025 13:47:10 -0500 (0:00:00.050) 0:02:20.080 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Monday 06 January 2025 13:47:10 -0500 (0:00:00.042) 0:02:20.122 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Monday 06 January 2025 13:47:10 -0500 (0:00:00.040) 0:02:20.163 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Monday 06 January 2025 13:47:10 -0500 (0:00:00.038) 0:02:20.201 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Monday 06 January 2025 13:47:11 -0500 (0:00:00.035) 0:02:20.236 ******** skipping: [managed-node2] => { "censored": "the output has been 
hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Monday 06 January 2025 13:47:11 -0500 (0:00:00.036) 0:02:20.273 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Monday 06 January 2025 13:47:11 -0500 (0:00:00.037) 0:02:20.311 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_test_debug | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:47:11 -0500 (0:00:00.035) 0:02:20.347 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Monday 06 January 2025 13:47:11 -0500 (0:00:00.081) 0:02:20.428 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node2 => (item=user_quadlet_pod) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Monday 06 January 2025 13:47:11 -0500 (0:00:00.090) 0:02:20.519 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_linger_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Monday 06 January 2025 13:47:11 -0500 (0:00:00.049) 0:02:20.568 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Monday 06 January 2025 13:47:11 -0500 (0:00:00.067) 0:02:20.635 ******** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Monday 06 January 2025 13:47:11 -0500 (0:00:00.412) 0:02:21.048 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Monday 06 January 2025 13:47:11 -0500 (0:00:00.057) 0:02:21.106 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Monday 06 January 2025 13:47:11 -0500 (0:00:00.041) 0:02:21.147 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Monday 06 January 2025 13:47:11 -0500 (0:00:00.039) 0:02:21.187 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_xdg_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Monday 06 January 2025 13:47:12 -0500 (0:00:00.040) 0:02:21.228 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:76 Monday 06 January 2025 13:47:12 -0500 (0:00:00.038) 0:02:21.266 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81 Monday 06 January 2025 13:47:12 -0500 (0:00:00.032) 0:02:21.299 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:92 Monday 06 January 2025 13:47:12 -0500 (0:00:00.033) 0:02:21.332 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__cancel_linger is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Monday 06 January 2025 13:47:12 -0500 (0:00:00.034) 0:02:21.366 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Monday 06 January 2025 13:47:12 -0500 (0:00:00.030) 0:02:21.397 
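
Editor's note: the cancel_linger.yml tasks above decide whether user_quadlet_pod still needs systemd lingering. Because XDG_RUNTIME_DIR (/run/user/2223) no longer exists, the per-user resource checks and the cancel task itself are skipped in this run. Conceptually the cancellation is just loginctl; a hedged sketch of an equivalent standalone task (not the role's exact implementation) is:

# Sketch: cancel systemd lingering for the test user once no podman resources remain.
- name: Cancel linger for user_quadlet_pod (illustration)
  ansible.builtin.command: loginctl disable-linger user_quadlet_pod
  changed_when: true
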
******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Remove test user] ******************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:168 Monday 06 January 2025 13:47:12 -0500 (0:00:00.124) 0:02:21.521 ******** changed: [managed-node2] => { "changed": true, "force": false, "name": "user_quadlet_pod", "remove": false, "state": "absent" } TASK [Cleanup system - root] *************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:173 Monday 06 January 2025 13:47:12 -0500 (0:00:00.471) 0:02:21.993 ******** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 06 January 2025 13:47:12 -0500 (0:00:00.103) 0:02:22.096 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 06 January 2025 13:47:12 -0500 (0:00:00.057) 0:02:22.154 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 06 January 2025 13:47:12 -0500 (0:00:00.042) 0:02:22.196 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 06 January 2025 13:47:13 -0500 (0:00:00.040) 0:02:22.236 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 06 January 2025 13:47:13 -0500 (0:00:00.055) 0:02:22.292 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 06 January 2025 13:47:13 -0500 (0:00:00.055) 0:02:22.347 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** 
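
Editor's note: the "Set platform/version specific variables" task that follows loads vars files from most generic to most specific; RedHat.yml supplies the base package set, CentOS_10.yml then overrides it, and candidates that do not exist (CentOS.yml here) are skipped via an "is file" test. A rough sketch of that layering pattern, with illustrative candidate names and paths only, is:

# Sketch of the layered vars-file pattern visible in the next task's output.
# Candidate file names and the vars/ path are illustrative; the role builds its own list.
- name: Set platform/version specific variables (illustration)
  ansible.builtin.include_vars: "{{ __vars_file }}"
  loop:
    - RedHat.yml
    - CentOS.yml
    - CentOS_10.yml
  vars:
    __vars_file: "vars/{{ item }}"
  when: __vars_file is file
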
task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 06 January 2025 13:47:13 -0500 (0:00:00.057) 0:02:22.404 ******** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 06 January 2025 13:47:13 -0500 (0:00:00.122) 0:02:22.526 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 06 January 2025 13:47:14 -0500 (0:00:00.783) 0:02:23.310 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 06 January 2025 13:47:14 -0500 (0:00:00.035) 0:02:23.345 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 06 January 2025 13:47:14 -0500 (0:00:00.112) 0:02:23.458 ******** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 06 January 2025 13:47:14 -0500 (0:00:00.059) 0:02:23.517 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not 
set] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 06 January 2025 13:47:14 -0500 (0:00:00.061) 0:02:23.578 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 06 January 2025 13:47:14 -0500 (0:00:00.060) 0:02:23.639 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024612", "end": "2025-01-06 13:47:14.757982", "rc": 0, "start": "2025-01-06 13:47:14.733370" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 06 January 2025 13:47:14 -0500 (0:00:00.412) 0:02:24.051 ******** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 06 January 2025 13:47:14 -0500 (0:00:00.062) 0:02:24.114 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 06 January 2025 13:47:14 -0500 (0:00:00.057) 0:02:24.171 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 06 January 2025 13:47:15 -0500 (0:00:00.057) 0:02:24.229 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 06 January 2025 13:47:15 -0500 (0:00:00.057) 0:02:24.287 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 06 January 2025 13:47:15 -0500 (0:00:00.072) 0:02:24.359 ******** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host 
conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 06 January 2025 13:47:15 -0500 (0:00:00.063) 0:02:24.423 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:47:15 -0500 (0:00:00.066) 0:02:24.490 ******** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:47:15 -0500 (0:00:00.377) 0:02:24.868 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:47:15 -0500 (0:00:00.051) 0:02:24.920 ******** ok: [managed-node2] => {} MSG: item {} TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:47:15 -0500 (0:00:00.114) 0:02:25.034 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:47:15 -0500 (0:00:00.057) 0:02:25.091 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:47:16 -0500 (0:00:00.384) 0:02:25.476 ******** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:47:16 -0500 (0:00:00.038) 0:02:25.515 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:47:16 -0500 (0:00:00.039) 0:02:25.554 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:47:16 -0500 (0:00:00.057) 0:02:25.612 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:47:16 -0500 (0:00:00.061) 0:02:25.674 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:47:16 -0500 (0:00:00.051) 0:02:25.725 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:47:16 -0500 (0:00:00.041) 0:02:25.767 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:47:16 -0500 (0:00:00.045) 0:02:25.812 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 06 January 2025 13:47:16 -0500 (0:00:00.048) 0:02:25.861 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": 
"/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Monday 06 January 2025 13:47:16 -0500 (0:00:00.050) 0:02:25.912 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 06 January 2025 13:47:16 -0500 (0:00:00.066) 0:02:25.978 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 06 January 2025 13:47:16 -0500 (0:00:00.034) 0:02:26.013 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Monday 06 January 2025 13:47:16 -0500 (0:00:00.034) 0:02:26.048 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 06 January 2025 13:47:16 -0500 (0:00:00.116) 0:02:26.164 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 06 January 2025 13:47:16 -0500 (0:00:00.036) 0:02:26.200 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Monday 06 January 2025 13:47:17 -0500 (0:00:00.034) 0:02:26.234 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Monday 06 January 2025 13:47:17 -0500 (0:00:00.065) 0:02:26.300 ******** skipping: 
[managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Monday 06 January 2025 13:47:17 -0500 (0:00:00.039) 0:02:26.340 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Monday 06 January 2025 13:47:17 -0500 (0:00:00.058) 0:02:26.399 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Monday 06 January 2025 13:47:17 -0500 (0:00:00.113) 0:02:26.512 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Monday 06 January 2025 13:47:17 -0500 (0:00:00.057) 0:02:26.569 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Monday 06 January 2025 13:47:17 -0500 (0:00:00.057) 0:02:26.627 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Monday 06 January 2025 13:47:17 -0500 (0:00:00.056) 0:02:26.684 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Monday 06 January 2025 13:47:17 -0500 (0:00:00.055) 0:02:26.740 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Monday 06 January 2025 13:47:17 -0500 (0:00:00.054) 0:02:26.794 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman 
: Keep track of users that need to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Monday 06 January 2025 13:47:17 -0500 (0:00:00.060) 0:02:26.855 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Monday 06 January 2025 13:47:17 -0500 (0:00:00.074) 0:02:26.929 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Monday 06 January 2025 13:47:17 -0500 (0:00:00.127) 0:02:27.057 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Monday 06 January 2025 13:47:17 -0500 (0:00:00.036) 0:02:27.093 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Monday 06 January 2025 13:47:17 -0500 (0:00:00.035) 0:02:27.129 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Monday 06 January 2025 13:47:17 -0500 (0:00:00.037) 0:02:27.166 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:47:18 -0500 (0:00:00.105) 0:02:27.272 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 
2025 13:47:18 -0500 (0:00:00.045) 0:02:27.318 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:47:18 -0500 (0:00:00.043) 0:02:27.361 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:47:18 -0500 (0:00:00.038) 0:02:27.400 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:47:18 -0500 (0:00:00.071) 0:02:27.471 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:47:18 -0500 (0:00:00.077) 0:02:27.548 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:47:18 -0500 (0:00:00.047) 0:02:27.596 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:47:18 -0500 (0:00:00.044) 0:02:27.640 ******** ok: [managed-node2] => {} MSG: item { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" }, "name": "quadlet-pod-container", "state": "absent", "type": "container" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:47:18 -0500 (0:00:00.037) 0:02:27.677 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids 
exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:47:18 -0500 (0:00:00.046) 0:02:27.724 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:47:18 -0500 (0:00:00.381) 0:02:28.105 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:47:18 -0500 (0:00:00.093) 0:02:28.199 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:47:19 -0500 (0:00:00.056) 0:02:28.255 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:47:19 -0500 (0:00:00.057) 0:02:28.313 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:47:19 -0500 (0:00:00.059) 0:02:28.372 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:47:19 -0500 (0:00:00.060) 0:02:28.433 ******** 
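
Editor's note: for the root-owned container handled in this pass, the spec shown in the Debug output above maps onto a .container quadlet under /etc/containers/systemd (the exact file and its generated unit, quadlet-pod-container.service, are resolved in the "part 3/4/5" variables that follow). A hedged sketch of the rendered file, written as a copy task purely for illustration since the role renders it from the spec itself, would be:

# Sketch only: what the root container quadlet corresponding to the spec above
# would roughly contain when state is not "absent".
- name: Install quadlet-pod-container.container (illustration)
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-pod-container.container
    mode: "0644"
    content: |
      [Container]
      ContainerName=quadlet-pod-container
      Exec=/bin/busybox-extras httpd -f -p 80
      Image=quay.io/libpod/testimage:20210610
      Pod=quadlet-pod-pod.pod

      [Install]
      WantedBy=default.target

Here __podman_state is "absent", so the subsequent tasks stop and disable quadlet-pod-container.service and remove this file rather than creating it.
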
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:47:19 -0500 (0:00:00.058) 0:02:28.492 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:47:19 -0500 (0:00:00.059) 0:02:28.551 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:47:19 -0500 (0:00:00.058) 0:02:28.610 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:47:19 -0500 (0:00:00.091) 0:02:28.701 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:47:19 -0500 (0:00:00.060) 0:02:28.762 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:47:19 -0500 (0:00:00.054) 0:02:28.817 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:47:19 -0500 (0:00:00.141) 0:02:28.958 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:47:19 -0500 (0:00:00.067) 0:02:29.025 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 06 January 2025 13:47:19 -0500 (0:00:00.131) 0:02:29.157 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 06 January 2025 13:47:19 -0500 (0:00:00.049) 0:02:29.207 ******** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2025-01-06 13:45:13 EST", "ActiveEnterTimestampMonotonic": "572437437", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "system.slice network-online.target systemd-journald.socket sysinit.target -.mount quadlet-pod-pod-pod.service basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-01-06 13:45:13 EST", "AssertTimestampMonotonic": "572312484", "Before": "shutdown.target multi-user.target", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "95802000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-01-06 13:45:13 EST", "ConditionTimestampMonotonic": "572312481", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-pod-container.service", "ControlGroupId": "10608", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": 
"no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "33681", "ExecMainStartTimestamp": "Mon 2025-01-06 13:45:13 EST", "ExecMainStartTimestampMonotonic": "572437395", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Mon 2025-01-06 13:45:13 EST] ; stop_time=[n/a] ; pid=33672 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Mon 2025-01-06 13:45:13 EST] ; stop_time=[n/a] ; pid=33672 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Mon 2025-01-06 13:45:13 EST", "InactiveExitTimestampMonotonic": "572322039", "InvocationID": "b20f4d59b28a437faf7d248b01aab852", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", 
"LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "33681", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3160645632", "MemoryCurrent": "851968", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "20865024", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", 
"RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Mon 2025-01-06 13:45:13 EST", "StateChangeTimestampMonotonic": "572437437", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Monday 06 January 2025 13:47:31 -0500 (0:00:11.344) 0:02:40.551 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736189114.0270116, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1736189111.7860186, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 37749006, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736189111.4710195, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-container.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 0, "version": "1348583525", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Monday 06 January 2025 13:47:31 -0500 (0:00:00.409) 0:02:40.960 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] 
******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 06 January 2025 13:47:31 -0500 (0:00:00.096) 0:02:41.056 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 06 January 2025 13:47:32 -0500 (0:00:00.375) 0:02:41.432 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Monday 06 January 2025 13:47:32 -0500 (0:00:00.057) 0:02:41.489 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Monday 06 January 2025 13:47:32 -0500 (0:00:00.042) 0:02:41.532 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Monday 06 January 2025 13:47:32 -0500 (0:00:00.042) 0:02:41.575 ******** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-container.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Monday 06 January 2025 13:47:32 -0500 (0:00:00.373) 0:02:41.948 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Monday 06 January 2025 13:47:33 -0500 (0:00:00.778) 0:02:42.726 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Monday 06 January 2025 13:47:33 -0500 (0:00:00.456) 0:02:43.183 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Monday 06 January 2025 13:47:34 -0500 (0:00:00.051) 0:02:43.234 ******** ok: [managed-node2] => { 
"ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Monday 06 January 2025 13:47:34 -0500 (0:00:00.036) 0:02:43.271 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.028319", "end": "2025-01-06 13:47:34.385467", "rc": 0, "start": "2025-01-06 13:47:34.357148" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Monday 06 January 2025 13:47:34 -0500 (0:00:00.404) 0:02:43.675 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:47:34 -0500 (0:00:00.077) 0:02:43.752 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:47:34 -0500 (0:00:00.051) 0:02:43.804 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:47:34 -0500 (0:00:00.148) 0:02:43.952 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Monday 06 January 2025 13:47:34 -0500 (0:00:00.060) 0:02:44.013 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030728", "end": "2025-01-06 13:47:35.161876", "rc": 0, "start": "2025-01-06 13:47:35.131148" } STDOUT: localhost/podman-pause 5.3.1-1733097600 178cbff819e5 5 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Monday 06 January 2025 13:47:35 -0500 (0:00:00.440) 0:02:44.453 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028129", "end": "2025-01-06 13:47:35.564375", "rc": 0, "start": "2025-01-06 13:47:35.536246" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 
Monday 06 January 2025 13:47:35 -0500 (0:00:00.429) 0:02:44.883 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.035038", "end": "2025-01-06 13:47:36.030090", "rc": 0, "start": "2025-01-06 13:47:35.995052" } STDOUT: 165acae01c44 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0ee319c08104-service aefe2eb0d74a localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp ce8aeae07491-infra fb8f4012b2f4 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 d161320bca5e localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 1345b7736131-service b6ada9a42cbe localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp ea72115d828a-infra 16cf55b7148e quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Monday 06 January 2025 13:47:36 -0500 (0:00:00.448) 0:02:45.331 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027248", "end": "2025-01-06 13:47:36.451843", "rc": 0, "start": "2025-01-06 13:47:36.424595" } STDOUT: podman podman-default-kube-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Monday 06 January 2025 13:47:36 -0500 (0:00:00.418) 0:02:45.750 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Monday 06 January 2025 13:47:36 -0500 (0:00:00.419) 0:02:46.170 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Monday 06 January 2025 13:47:37 -0500 (0:00:00.437) 0:02:46.607 ******** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { 
"name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", 
"status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-pod-pod-pod.service": { "name": "quadlet-pod-pod-pod.service", "source": "systemd", "state": "inactive", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": 
"rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, 
"sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:47:39 -0500 (0:00:02.034) 0:02:48.642 ******** 
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 06 January 2025 13:47:39 -0500 (0:00:00.033) 0:02:48.676 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 06 January 2025 13:47:39 -0500 (0:00:00.046) 0:02:48.722 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 06 January 2025 13:47:39 -0500 (0:00:00.046) 0:02:48.768 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 06 January 2025 13:47:39 -0500 (0:00:00.046) 0:02:48.815 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 06 January 2025 13:47:39 -0500 (0:00:00.073) 0:02:48.888 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 06 January 2025 13:47:39 -0500 (0:00:00.078) 0:02:48.967 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 06 January 2025 13:47:39 -0500 (0:00:00.110) 0:02:49.077 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Debug] ******************************** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 06 January 2025 13:47:39 -0500 (0:00:00.039) 0:02:49.117 ******** ok: [managed-node2] => {} MSG: item { "Pod": { "PodName": "quadlet-pod" }, "name": "quadlet-pod-pod", "state": "absent", "type": "pod" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:20 Monday 06 January 2025 13:47:39 -0500 (0:00:00.040) 0:02:49.158 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:35 Monday 06 January 2025 13:47:39 -0500 (0:00:00.055) 0:02:49.214 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736188886.4685817, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736188861.3885152, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3940759558", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:46 Monday 06 January 2025 13:47:40 -0500 (0:00:00.405) 0:02:49.619 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:51 Monday 06 January 2025 13:47:40 -0500 (0:00:00.065) 0:02:49.684 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:56 Monday 06 January 2025 13:47:40 -0500 (0:00:00.063) 0:02:49.748 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:69 Monday 06 January 2025 13:47:40 -0500 (0:00:00.061) 0:02:49.810 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:74 Monday 06 January 2025 13:47:40 -0500 (0:00:00.063) 0:02:49.873 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:79 Monday 06 January 2025 13:47:40 -0500 (0:00:00.059) 0:02:49.932 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Monday 06 January 2025 13:47:40 -0500 (0:00:00.059) 0:02:49.991 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:96 Monday 06 January 2025 13:47:40 -0500 (0:00:00.058) 0:02:50.050 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 06 January 2025 13:47:40 -0500 (0:00:00.066) 0:02:50.117 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 06 January 2025 13:47:40 -0500 (0:00:00.092) 0:02:50.209 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 06 January 2025 13:47:41 -0500 (0:00:00.071) 0:02:50.281 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Monday 06 January 2025 13:47:41 -0500 (0:00:00.054) 0:02:50.335 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], 
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Monday 06 January 2025 13:47:41 -0500 (0:00:00.124) 0:02:50.459 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Monday 06 January 2025 13:47:41 -0500 (0:00:00.139) 0:02:50.599 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 06 January 2025 13:47:41 -0500 (0:00:00.097) 0:02:50.696 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 06 January 2025 13:47:41 -0500 (0:00:00.036) 0:02:50.733 ******** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-pod-pod.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target -.mount systemd-journald.socket network-online.target basic.target system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": 
"0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698233344", "EffectiveMemoryMax": "3698233344", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": 
"quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3144216576", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", 
"RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Monday 06 January 2025 13:47:42 -0500 (0:00:00.818) 0:02:51.551 ******** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736189115.3750074, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "ctime": 1736189103.8230433, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 612368590, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736189103.4530447, "nlink": 1, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 70, "uid": 0, "version": "3251686538", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 
Monday 06 January 2025 13:47:42 -0500 (0:00:00.428) 0:02:51.980 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 06 January 2025 13:47:42 -0500 (0:00:00.104) 0:02:52.084 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 06 January 2025 13:47:43 -0500 (0:00:00.375) 0:02:52.460 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Monday 06 January 2025 13:47:43 -0500 (0:00:00.075) 0:02:52.535 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Monday 06 January 2025 13:47:43 -0500 (0:00:00.048) 0:02:52.584 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Monday 06 January 2025 13:47:43 -0500 (0:00:00.041) 0:02:52.625 ******** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-pod-pod.pod", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Monday 06 January 2025 13:47:43 -0500 (0:00:00.386) 0:02:53.012 ******** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Monday 06 January 2025 13:47:44 -0500 (0:00:00.794) 0:02:53.807 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Monday 06 January 2025 13:47:45 -0500 (0:00:00.468) 0:02:54.276 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman 
variable] ********* task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Monday 06 January 2025 13:47:45 -0500 (0:00:00.070) 0:02:54.347 ******** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Monday 06 January 2025 13:47:45 -0500 (0:00:00.111) 0:02:54.458 ******** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.033823", "end": "2025-01-06 13:47:45.619587", "rc": 0, "start": "2025-01-06 13:47:45.585764" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Monday 06 January 2025 13:47:45 -0500 (0:00:00.466) 0:02:54.924 ******** included: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 06 January 2025 13:47:45 -0500 (0:00:00.061) 0:02:54.986 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 06 January 2025 13:47:45 -0500 (0:00:00.033) 0:02:55.020 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 06 January 2025 13:47:45 -0500 (0:00:00.034) 0:02:55.055 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Monday 06 January 2025 13:47:45 -0500 (0:00:00.034) 0:02:55.089 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031783", "end": "2025-01-06 13:47:46.206311", "rc": 0, "start": "2025-01-06 13:47:46.174528" } STDOUT: localhost/podman-pause 5.3.1-1733097600 178cbff819e5 5 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Monday 06 January 2025 13:47:46 -0500 (0:00:00.410) 0:02:55.500 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027198", "end": "2025-01-06 13:47:46.607887", "rc": 0, "start": 
"2025-01-06 13:47:46.580689" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Monday 06 January 2025 13:47:46 -0500 (0:00:00.399) 0:02:55.900 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033607", "end": "2025-01-06 13:47:47.019365", "rc": 0, "start": "2025-01-06 13:47:46.985758" } STDOUT: 165acae01c44 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0ee319c08104-service aefe2eb0d74a localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp ce8aeae07491-infra fb8f4012b2f4 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 d161320bca5e localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 1345b7736131-service b6ada9a42cbe localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp ea72115d828a-infra 16cf55b7148e quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Monday 06 January 2025 13:47:47 -0500 (0:00:00.425) 0:02:56.325 ******** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.027099", "end": "2025-01-06 13:47:47.444785", "rc": 0, "start": "2025-01-06 13:47:47.417686" } STDOUT: podman podman-default-kube-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Monday 06 January 2025 13:47:47 -0500 (0:00:00.423) 0:02:56.749 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Monday 06 January 2025 13:47:47 -0500 (0:00:00.428) 0:02:57.178 ******** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Monday 06 January 2025 13:47:48 -0500 (0:00:00.451) 0:02:57.629 ******** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": 
"display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": 
"initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-pod-container.service": { "name": "quadlet-pod-container.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { 
"name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, 
"sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": 
"systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { 
"name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: 
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Monday 06 January 2025 13:47:50 -0500 (0:00:02.071) 0:02:59.700 ******** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Monday 06 January 2025 13:47:50 -0500 (0:00:00.055) 0:02:59.757 ******** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Monday 06 January 2025 13:47:50 -0500 (0:00:00.053) 0:02:59.810 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Monday 06 January 2025 13:47:50 -0500 (0:00:00.036) 0:02:59.847 ******** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:182 Monday 06 January 2025 13:47:50 -0500 (0:00:00.114) 0:02:59.961 ******** fatal: [managed-node2]: FAILED! => { "assertion": "__podman_test_debug_images.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Dump journal] ************************************************************ task path: /tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194 Monday 06 January 2025 13:47:50 -0500 (0:00:00.045) 0:03:00.006 ******** fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.032164", "end": "2025-01-06 13:47:51.111551", "failed_when_result": true, "rc": 0, "start": "2025-01-06 13:47:51.079387" } STDOUT: Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Using transient store: false" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Initializing event backend file" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 06 13:43:58 managed-node2 /usr/bin/podman[25007]: time="2025-01-06T13:43:58-05:00" level=debug 
msg="Using OCI runtime \"/usr/bin/crun\"" Jan 06 13:43:58 managed-node2 systemd[23127]: Stopping libpod-conmon-2e73b18341ba5007b4e6d1c8b4d4703e9ea1016e5e17abb0f0be2c32f91ee309.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 85. Jan 06 13:43:58 managed-node2 systemd[23127]: Stopped libpod-conmon-2e73b18341ba5007b4e6d1c8b4d4703e9ea1016e5e17abb0f0be2c32f91ee309.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Jan 06 13:43:58 managed-node2 systemd[23127]: Removed slice user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice - cgroup user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Jan 06 13:43:58 managed-node2 systemd[23127]: user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice: No such file or directory Jan 06 13:43:58 managed-node2 systemd[23127]: user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice: No such file or directory Jan 06 13:43:58 managed-node2 systemd[23127]: user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434.slice: No such file or directory Jan 06 13:43:58 managed-node2 podman[24978]: Pods stopped: Jan 06 13:43:58 managed-node2 podman[24978]: 1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434 Jan 06 13:43:58 managed-node2 podman[24978]: Pods removed: Jan 06 13:43:58 managed-node2 podman[24978]: 1f1c124dbb9d329306cbc91d9980fbcc0c1d01bdcf785fa336a8475cd8fc8434 Jan 06 13:43:58 managed-node2 podman[24978]: Secrets removed: Jan 06 13:43:58 managed-node2 podman[24978]: Volumes removed: Jan 06 13:43:59 managed-node2 systemd[23127]: Created slice user-libpod_pod_50c9e0f960aa547a184f03b40fbb9e936646546e35834968864d8e905966058a.slice - cgroup user-libpod_pod_50c9e0f960aa547a184f03b40fbb9e936646546e35834968864d8e905966058a.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jan 06 13:43:59 managed-node2 systemd[23127]: Started libpod-0adb1b139ed69796cee3f61f67b8e076647ee4298791252c30650ea08aa9bb1d.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Jan 06 13:43:59 managed-node2 systemd[23127]: Started rootless-netns-e3da788f.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Jan 06 13:43:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 06 13:43:59 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 06 13:43:59 managed-node2 kernel: veth0: entered allmulticast mode Jan 06 13:43:59 managed-node2 kernel: veth0: entered promiscuous mode Jan 06 13:43:59 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 06 13:43:59 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 06 13:43:59 managed-node2 systemd[23127]: Started run-r7d8bfc637a834d89964f8411a8eca0bb.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Jan 06 13:43:59 managed-node2 systemd[23127]: Started libpod-79c44477873f5791386ae415c24a00ac98b4fd3327dae666b2fef75d315ba237.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Jan 06 13:43:59 managed-node2 systemd[23127]: Started libpod-b9313408d2daad537774028032dd69ec160d39af18c66a5029afc47a44603923.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 06 13:43:59 managed-node2 podman[24978]: Pod: Jan 06 13:43:59 managed-node2 podman[24978]: 50c9e0f960aa547a184f03b40fbb9e936646546e35834968864d8e905966058a Jan 06 13:43:59 managed-node2 podman[24978]: Container: Jan 06 13:43:59 managed-node2 podman[24978]: b9313408d2daad537774028032dd69ec160d39af18c66a5029afc47a44603923 Jan 06 13:43:59 managed-node2 systemd[23127]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
Jan 06 13:43:59 managed-node2 sudo[24972]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 06 13:44:00 managed-node2 python3.12[25195]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 06 13:44:01 managed-node2 python3.12[25327]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:44:02 managed-node2 python3.12[25460]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:03 managed-node2 python3.12[25592]: ansible-file Invoked with path=/tmp/lsr__vern8jf_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:03 managed-node2 python3.12[25723]: ansible-file Invoked with path=/tmp/lsr__vern8jf_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:04 managed-node2 systemd[4479]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 06 13:44:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 06 13:44:04 managed-node2 systemd[4479]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 06 13:44:04 managed-node2 systemd[4479]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 06 13:44:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 06 13:44:05 managed-node2 podman[25889]: 2025-01-06 13:44:05.867034878 -0500 EST m=+1.264593500 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:44:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 06 13:44:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 06 13:44:06 managed-node2 python3.12[26035]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:44:06 managed-node2 python3.12[26166]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:07 managed-node2 python3.12[26297]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 06 13:44:07 managed-node2 python3.12[26402]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736189046.995774-15455-257716526560516/.source.yml _original_basename=._ubrtp6r follow=False checksum=595960bb2ca5b5259de62cb1981c272bc93f1de5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:08 managed-node2 python3.12[26533]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 06 13:44:08 managed-node2 systemd[1]: Created slice machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice - cgroup machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice. ░░ Subject: A start job for unit machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice has finished successfully. ░░ ░░ The job identifier is 1991. 
Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.169717745 -0500 EST m=+0.071299908 container create 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c (image=localhost/podman-pause:5.3.1-1733097600, name=d2912d1edd53-infra, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, io.buildah.version=1.38.0) Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.175156767 -0500 EST m=+0.076738882 pod create d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c (image=, name=httpd2) Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.177820468 -0500 EST m=+0.079402775 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.294669883 -0500 EST m=+0.196252070 container create 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3139] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 06 13:44:08 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 06 13:44:08 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 06 13:44:08 managed-node2 kernel: veth0: entered allmulticast mode Jan 06 13:44:08 managed-node2 kernel: veth0: entered promiscuous mode Jan 06 13:44:08 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 06 13:44:08 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3274] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3321] device (veth0): carrier: link connected Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3322] device (podman1): carrier: link connected Jan 06 13:44:08 managed-node2 (udev-worker)[26555]: Network interface NamePolicy= disabled on kernel command line. Jan 06 13:44:08 managed-node2 (udev-worker)[26556]: Network interface NamePolicy= disabled on kernel command line. 
Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3835] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3840] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3892] device (podman1): Activation: starting connection 'podman1' (020e1ad3-1250-43c9-baf6-9500f7227ec8) Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3896] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3901] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3905] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.3911] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 06 13:44:08 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1998. Jan 06 13:44:08 managed-node2 systemd[1]: Started run-r8cc6d90b84984f11ac2f9ff64ef8e701.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r8cc6d90b84984f11ac2f9ff64ef8e701.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r8cc6d90b84984f11ac2f9ff64ef8e701.scope has finished successfully. ░░ ░░ The job identifier is 2077. Jan 06 13:44:08 managed-node2 aardvark-dns[26580]: starting aardvark on a child with pid 26582 Jan 06 13:44:08 managed-node2 aardvark-dns[26582]: Successfully parsed config Jan 06 13:44:08 managed-node2 aardvark-dns[26582]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 06 13:44:08 managed-node2 aardvark-dns[26582]: Listen v6 ip {} Jan 06 13:44:08 managed-node2 aardvark-dns[26582]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Jan 06 13:44:08 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1998. Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.4359] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.4362] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 06 13:44:08 managed-node2 NetworkManager[778]: [1736189048.4366] device (podman1): Activation: successful, device activated. 
Jan 06 13:44:08 managed-node2 systemd[1]: Started libpod-conmon-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope. ░░ Subject: A start job for unit libpod-conmon-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope has finished successfully. ░░ ░░ The job identifier is 2083. Jan 06 13:44:08 managed-node2 conmon[26596]: conmon 56b53421f5e78853110b : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 06 13:44:08 managed-node2 conmon[26596]: conmon 56b53421f5e78853110b : terminal_ctrl_fd: 13 Jan 06 13:44:08 managed-node2 conmon[26596]: conmon 56b53421f5e78853110b : winsz read side: 17, winsz write side: 18 Jan 06 13:44:08 managed-node2 systemd[1]: Started libpod-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope - libcrun container. ░░ Subject: A start job for unit libpod-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope has finished successfully. ░░ ░░ The job identifier is 2090. Jan 06 13:44:08 managed-node2 conmon[26596]: conmon 56b53421f5e78853110b : container PID: 26598 Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.509197616 -0500 EST m=+0.410779782 container init 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c (image=localhost/podman-pause:5.3.1-1733097600, name=d2912d1edd53-infra, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, io.buildah.version=1.38.0) Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.512301138 -0500 EST m=+0.413883370 container start 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c (image=localhost/podman-pause:5.3.1-1733097600, name=d2912d1edd53-infra, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, io.buildah.version=1.38.0) Jan 06 13:44:08 managed-node2 systemd[1]: Started libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope. ░░ Subject: A start job for unit libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has finished successfully. ░░ ░░ The job identifier is 2097. Jan 06 13:44:08 managed-node2 conmon[26601]: conmon 869e90da600068fe4d7d : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jan 06 13:44:08 managed-node2 conmon[26601]: conmon 869e90da600068fe4d7d : terminal_ctrl_fd: 12 Jan 06 13:44:08 managed-node2 conmon[26601]: conmon 869e90da600068fe4d7d : winsz read side: 16, winsz write side: 17 Jan 06 13:44:08 managed-node2 systemd[1]: Started libpod-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope - libcrun container. ░░ Subject: A start job for unit libpod-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has finished successfully. 
░░ ░░ The job identifier is 2104. Jan 06 13:44:08 managed-node2 conmon[26601]: conmon 869e90da600068fe4d7d : container PID: 26603 Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.582683066 -0500 EST m=+0.484265255 container init 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.58558749 -0500 EST m=+0.487169846 container start 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 06 13:44:08 managed-node2 podman[26540]: 2025-01-06 13:44:08.590116274 -0500 EST m=+0.491698394 pod start d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c (image=, name=httpd2) Jan 06 13:44:08 managed-node2 python3.12[26533]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jan 06 13:44:08 managed-node2 python3.12[26533]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c Container: 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae Jan 06 13:44:08 managed-node2 python3.12[26533]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-06T13:44:08-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-01-06T13:44:08-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-06T13:44:08-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-06T13:44:08-05:00" level=info msg="Using sqlite as database backend" time="2025-01-06T13:44:08-05:00" level=debug msg="Using graph driver overlay" time="2025-01-06T13:44:08-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Using run root /run/containers/storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-01-06T13:44:08-05:00" level=debug msg="Using tmp dir /run/libpod" time="2025-01-06T13:44:08-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-01-06T13:44:08-05:00" level=debug msg="Using transient store: false" time="2025-01-06T13:44:08-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-06T13:44:08-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-06T13:44:08-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-01-06T13:44:08-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-01-06T13:44:08-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for 
building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-01-06T13:44:08-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-01-06T13:44:08-05:00" level=debug msg="Initializing event backend journald" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-06T13:44:08-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-06T13:44:08-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-06T13:44:08-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network ade42f939bed2d59630be77b3c7699831acf428d3c9e3feacbb4575f00f127ab bridge podman1 2025-01-06 13:41:50.624946668 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-06T13:44:08-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-06T13:44:08-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-06T13:44:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-06T13:44:08-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e)" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-06T13:44:08-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice for parent machine.slice and name libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c" time="2025-01-06T13:44:08-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice" time="2025-01-06T13:44:08-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice" time="2025-01-06T13:44:08-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-06T13:44:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-06T13:44:08-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e)" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Inspecting image 178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Inspecting image 178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e" time="2025-01-06T13:44:08-05:00" level=debug msg="Inspecting image 178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e" time="2025-01-06T13:44:08-05:00" level=debug msg="using systemd mode: false" time="2025-01-06T13:44:08-05:00" level=debug msg="setting container name d2912d1edd53-infra" time="2025-01-06T13:44:08-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Allocated lock 1 for container 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:178cbff819e53fa371dc14aee52228e121c1d319e55594a06d1802d4e095c69e\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2025-01-06T13:44:08-05:00" level=debug msg="Created container \"56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Container \"56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c\" has work directory \"/var/lib/containers/storage/overlay-containers/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c/userdata\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Container \"56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c\" has run directory \"/run/containers/storage/overlay-containers/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c/userdata\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-06T13:44:08-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-06T13:44:08-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-06T13:44:08-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-06T13:44:08-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-06T13:44:08-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-06T13:44:08-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-06T13:44:08-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-06T13:44:08-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-06T13:44:08-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-06T13:44:08-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-06T13:44:08-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-06T13:44:08-05:00" level=debug msg="using systemd mode: false" time="2025-01-06T13:44:08-05:00" level=debug msg="adding container to pod httpd2" time="2025-01-06T13:44:08-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-01-06T13:44:08-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-06T13:44:08-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding mount /proc" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding mount /dev" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding mount /sys" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-06T13:44:08-05:00" level=debug msg="Allocated lock 2 for container 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae" time="2025-01-06T13:44:08-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Created container \"869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Container \"869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae\" has work directory \"/var/lib/containers/storage/overlay-containers/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae/userdata\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Container \"869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae\" has run directory \"/run/containers/storage/overlay-containers/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae/userdata\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Strongconnecting node 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c" time="2025-01-06T13:44:08-05:00" level=debug msg="Pushed 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c onto stack" time="2025-01-06T13:44:08-05:00" level=debug msg="Finishing node 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c. Popped 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c off stack" time="2025-01-06T13:44:08-05:00" level=debug msg="Strongconnecting node 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae" time="2025-01-06T13:44:08-05:00" level=debug msg="Pushed 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae onto stack" time="2025-01-06T13:44:08-05:00" level=debug msg="Finishing node 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae. 
Popped 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae off stack" time="2025-01-06T13:44:08-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/G2RFZH6TVHMNCPFUYX4HHQ5UGD,upperdir=/var/lib/containers/storage/overlay/c3d607f36f0d429771136922f14db7ded660c88130323c9b1667737da003ab9f/diff,workdir=/var/lib/containers/storage/overlay/c3d607f36f0d429771136922f14db7ded660c88130323c9b1667737da003ab9f/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c260,c325\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Mounted container \"56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c\" at \"/var/lib/containers/storage/overlay/c3d607f36f0d429771136922f14db7ded660c88130323c9b1667737da003ab9f/merged\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Created root filesystem for container 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c at /var/lib/containers/storage/overlay/c3d607f36f0d429771136922f14db7ded660c88130323c9b1667737da003ab9f/merged" time="2025-01-06T13:44:08-05:00" level=debug msg="Made network namespace at /run/netns/netns-96a4e47e-185b-ede1-0aee-2798db047441 for container 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_ade42f93_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "46:5b:3b:fa:c6:64", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-06T13:44:08-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-06T13:44:08-05:00" level=debug msg="Setting Cgroups for container 
56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c to machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice:libpod:56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c" time="2025-01-06T13:44:08-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-06T13:44:08-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/c3d607f36f0d429771136922f14db7ded660c88130323c9b1667737da003ab9f/merged\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Created OCI spec for container 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c at /var/lib/containers/storage/overlay-containers/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c/userdata/config.json" time="2025-01-06T13:44:08-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice for parent machine.slice and name libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c" time="2025-01-06T13:44:08-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice" time="2025-01-06T13:44:08-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice" time="2025-01-06T13:44:08-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-06T13:44:08-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c -u 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c/userdata -p /run/containers/storage/overlay-containers/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c/userdata/pidfile -n d2912d1edd53-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c]" time="2025-01-06T13:44:08-05:00" level=info msg="Running conmon under slice machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice and unitName libpod-conmon-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope" time="2025-01-06T13:44:08-05:00" level=debug msg="Received: 26598" time="2025-01-06T13:44:08-05:00" level=info msg="Got Conmon PID as 26596" time="2025-01-06T13:44:08-05:00" level=debug msg="Created container 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c in OCI runtime" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-06T13:44:08-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-06T13:44:08-05:00" level=debug msg="Starting container 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c with command [/catatonit -P]" time="2025-01-06T13:44:08-05:00" level=debug msg="Started container 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c" time="2025-01-06T13:44:08-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/WZHCCLRYTCGMF6LGDX5EZFIDHD,upperdir=/var/lib/containers/storage/overlay/ae825d06adf01c37e15f982f18d6d4d7086e16fd266421b3bd0b997b1e3952d9/diff,workdir=/var/lib/containers/storage/overlay/ae825d06adf01c37e15f982f18d6d4d7086e16fd266421b3bd0b997b1e3952d9/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c260,c325\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Mounted container \"869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae\" at \"/var/lib/containers/storage/overlay/ae825d06adf01c37e15f982f18d6d4d7086e16fd266421b3bd0b997b1e3952d9/merged\"" time="2025-01-06T13:44:08-05:00" level=debug msg="Created root filesystem for container 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae at /var/lib/containers/storage/overlay/ae825d06adf01c37e15f982f18d6d4d7086e16fd266421b3bd0b997b1e3952d9/merged" time="2025-01-06T13:44:08-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-06T13:44:08-05:00" level=debug msg="Setting Cgroups for container 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae to machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice:libpod:869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae" time="2025-01-06T13:44:08-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-06T13:44:08-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-06T13:44:08-05:00" level=debug msg="Created OCI spec for container 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae at /var/lib/containers/storage/overlay-containers/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae/userdata/config.json" time="2025-01-06T13:44:08-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice for parent machine.slice and name libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c" time="2025-01-06T13:44:08-05:00" level=debug msg="Created cgroup 
machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice" time="2025-01-06T13:44:08-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice" time="2025-01-06T13:44:08-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-06T13:44:08-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae -u 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae/userdata -p /run/containers/storage/overlay-containers/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae]" time="2025-01-06T13:44:08-05:00" level=info msg="Running conmon under slice machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice and unitName libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope" time="2025-01-06T13:44:08-05:00" level=debug msg="Received: 26603" time="2025-01-06T13:44:08-05:00" level=info msg="Got Conmon PID as 26601" time="2025-01-06T13:44:08-05:00" level=debug msg="Created container 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae in OCI runtime" time="2025-01-06T13:44:08-05:00" level=debug msg="Starting container 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-06T13:44:08-05:00" level=debug msg="Started container 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae" time="2025-01-06T13:44:08-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug 
/etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-06T13:44:08-05:00" level=debug msg="Shutting down engines" time="2025-01-06T13:44:08-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26540 Jan 06 13:44:08 managed-node2 python3.12[26533]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 06 13:44:09 managed-node2 python3.12[26735]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:44:09 managed-node2 systemd[1]: Reload requested from client PID 26736 ('systemctl') (unit session-5.scope)... Jan 06 13:44:09 managed-node2 systemd[1]: Reloading... Jan 06 13:44:09 managed-node2 systemd[1]: Reloading finished in 206 ms. Jan 06 13:44:09 managed-node2 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab... ░░ Subject: A start job for unit fstrim.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has begun execution. ░░ ░░ The job identifier is 2111. Jan 06 13:44:09 managed-node2 systemd[1]: fstrim.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit fstrim.service has successfully entered the 'dead' state. Jan 06 13:44:09 managed-node2 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab. ░░ Subject: A start job for unit fstrim.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has finished successfully. ░░ ░░ The job identifier is 2111. Jan 06 13:44:09 managed-node2 python3.12[26925]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 06 13:44:09 managed-node2 systemd[1]: Reload requested from client PID 26928 ('systemctl') (unit session-5.scope)... Jan 06 13:44:09 managed-node2 systemd[1]: Reloading... Jan 06 13:44:10 managed-node2 systemd[1]: Reloading finished in 202 ms. Jan 06 13:44:10 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2189. Jan 06 13:44:10 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jan 06 13:44:10 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2189. 
Jan 06 13:44:10 managed-node2 python3.12[27118]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 06 13:44:10 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2345. Jan 06 13:44:10 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2267. Jan 06 13:44:10 managed-node2 podman[27122]: 2025-01-06 13:44:10.823666661 -0500 EST m=+0.024880644 pod stop d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c (image=, name=httpd2) Jan 06 13:44:10 managed-node2 systemd[1]: libpod-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope has successfully entered the 'dead' state. Jan 06 13:44:10 managed-node2 podman[27122]: 2025-01-06 13:44:10.853274067 -0500 EST m=+0.054487982 container died 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c (image=localhost/podman-pause:5.3.1-1733097600, name=d2912d1edd53-infra, io.buildah.version=1.38.0) Jan 06 13:44:10 managed-node2 aardvark-dns[26582]: Received SIGHUP Jan 06 13:44:10 managed-node2 aardvark-dns[26582]: Successfully parsed config Jan 06 13:44:10 managed-node2 aardvark-dns[26582]: Listen v4 ip {} Jan 06 13:44:10 managed-node2 aardvark-dns[26582]: Listen v6 ip {} Jan 06 13:44:10 managed-node2 aardvark-dns[26582]: No configuration found stopping the sever Jan 06 13:44:10 managed-node2 systemd[1]: run-r8cc6d90b84984f11ac2f9ff64ef8e701.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r8cc6d90b84984f11ac2f9ff64ef8e701.scope has successfully entered the 'dead' state. 
Jan 06 13:44:10 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 06 13:44:10 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 06 13:44:10 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 06 13:44:10 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c)" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=info msg="Using sqlite as database backend" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using graph driver overlay" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using run root /run/containers/storage" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using transient store: false" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug 
msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Initializing event backend journald" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=info msg="Setting parallel job count to 7" Jan 06 13:44:10 managed-node2 NetworkManager[778]: [1736189050.8962] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 06 13:44:10 managed-node2 systemd[1]: run-netns-netns\x2d96a4e47e\x2d185b\x2dede1\x2d0aee\x2d2798db047441.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d96a4e47e\x2d185b\x2dede1\x2d0aee\x2d2798db047441.mount has successfully entered the 'dead' state. Jan 06 13:44:10 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c-userdata-shm.mount has successfully entered the 'dead' state. 
Jan 06 13:44:10 managed-node2 systemd[1]: var-lib-containers-storage-overlay-c3d607f36f0d429771136922f14db7ded660c88130323c9b1667737da003ab9f-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-c3d607f36f0d429771136922f14db7ded660c88130323c9b1667737da003ab9f-merged.mount has successfully entered the 'dead' state. Jan 06 13:44:10 managed-node2 podman[27122]: 2025-01-06 13:44:10.968295208 -0500 EST m=+0.169509238 container cleanup 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c (image=localhost/podman-pause:5.3.1-1733097600, name=d2912d1edd53-infra, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, io.buildah.version=1.38.0) Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c)" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=debug msg="Shutting down engines" Jan 06 13:44:10 managed-node2 /usr/bin/podman[27133]: time="2025-01-06T13:44:10-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27133 Jan 06 13:44:10 managed-node2 systemd[1]: libpod-conmon-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c.scope has successfully entered the 'dead' state. Jan 06 13:44:20 managed-node2 podman[27122]: time="2025-01-06T13:44:20-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jan 06 13:44:20 managed-node2 conmon[26601]: conmon 869e90da600068fe4d7d : container 26603 exited with status 137 Jan 06 13:44:20 managed-node2 systemd[1]: libpod-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has successfully entered the 'dead' state. 
Jan 06 13:44:20 managed-node2 conmon[26601]: conmon 869e90da600068fe4d7d : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice/libpod-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope/container/memory.events Jan 06 13:44:20 managed-node2 podman[27122]: 2025-01-06 13:44:20.855760026 -0500 EST m=+10.056974110 container died 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 06 13:44:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay-ae825d06adf01c37e15f982f18d6d4d7086e16fd266421b3bd0b997b1e3952d9-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-ae825d06adf01c37e15f982f18d6d4d7086e16fd266421b3bd0b997b1e3952d9-merged.mount has successfully entered the 'dead' state. Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae)" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=info msg="Using sqlite as database backend" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using graph driver overlay" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using run root /run/containers/storage" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using transient store: false" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Cached value indicated that overlay is supported" 
Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Initializing event backend journald" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=info msg="Setting parallel job count to 7" Jan 06 13:44:20 managed-node2 podman[27122]: 2025-01-06 13:44:20.889097587 -0500 EST m=+10.090311565 container cleanup 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae 
(image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae)" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=debug msg="Shutting down engines" Jan 06 13:44:20 managed-node2 /usr/bin/podman[27155]: time="2025-01-06T13:44:20-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27155 Jan 06 13:44:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 06 13:44:20 managed-node2 systemd[1]: libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has successfully entered the 'dead' state. Jan 06 13:44:20 managed-node2 systemd[1]: Stopped libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope. ░░ Subject: A stop job for unit libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae.scope has finished. ░░ ░░ The job identifier is 2353 and the job result is done. Jan 06 13:44:20 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 06 13:44:20 managed-node2 systemd[1]: Removed slice machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice - cgroup machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice. ░░ Subject: A stop job for unit machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice has finished. ░░ ░░ The job identifier is 2352 and the job result is done. 
Jan 06 13:44:20 managed-node2 podman[27122]: 2025-01-06 13:44:20.934740933 -0500 EST m=+10.135954969 pod stop d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c (image=, name=httpd2) Jan 06 13:44:20 managed-node2 systemd[1]: machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice: Failed to open /run/systemd/transient/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice: No such file or directory Jan 06 13:44:20 managed-node2 podman[27122]: 2025-01-06 13:44:20.948753134 -0500 EST m=+10.149967042 pod stop d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c (image=, name=httpd2) Jan 06 13:44:20 managed-node2 systemd[1]: machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice: Failed to open /run/systemd/transient/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice: No such file or directory Jan 06 13:44:20 managed-node2 podman[27122]: 2025-01-06 13:44:20.975459184 -0500 EST m=+10.176673092 container remove 869e90da600068fe4d7db58ab626b7c20d65df5b29349ea7d2f3415964259eae (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 06 13:44:20 managed-node2 podman[27122]: 2025-01-06 13:44:20.996264957 -0500 EST m=+10.197478868 container remove 56b53421f5e78853110b38f954f9fcb38cedfcfd001bfe0858d0be07ad83068c (image=localhost/podman-pause:5.3.1-1733097600, name=d2912d1edd53-infra, pod_id=d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c, io.buildah.version=1.38.0) Jan 06 13:44:20 managed-node2 systemd[1]: machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice: Failed to open /run/systemd/transient/machine-libpod_pod_d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c.slice: No such file or directory Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.003713213 -0500 EST m=+10.204927083 pod remove d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c (image=, name=httpd2) Jan 06 13:44:21 managed-node2 podman[27122]: Pods stopped: Jan 06 13:44:21 managed-node2 podman[27122]: d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c Jan 06 13:44:21 managed-node2 podman[27122]: Pods removed: Jan 06 13:44:21 managed-node2 podman[27122]: d2912d1edd53622b54c9790b16193ddcc3abfa1415bde167184cbd8b9cf4fa4c Jan 06 13:44:21 managed-node2 podman[27122]: Secrets removed: Jan 06 13:44:21 managed-node2 podman[27122]: Volumes removed: Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.023609057 -0500 EST m=+10.224823117 container create 165acae01c4497fd7cb0b0fc9047f425e41b20a3cb23ada8c49b21a9498ba9c7 (image=localhost/podman-pause:5.3.1-1733097600, name=0ee319c08104-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 06 13:44:21 managed-node2 systemd[1]: Created slice machine-libpod_pod_ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374.slice - cgroup machine-libpod_pod_ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374.slice. 
░░ Subject: A start job for unit machine-libpod_pod_ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374.slice has finished successfully. ░░ ░░ The job identifier is 2354. Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.127897331 -0500 EST m=+10.329111243 container create aefe2eb0d74a6b4ca9fa77a3ebb589a318080b781017be1c87ad3b74ba339474 (image=localhost/podman-pause:5.3.1-1733097600, name=ce8aeae07491-infra, pod_id=ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.132928526 -0500 EST m=+10.334142426 pod create ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374 (image=, name=httpd2) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.1561621 -0500 EST m=+10.357376002 container create fb8f4012b2f4fa0f8fa47d0aa0512f5ee919ea91cca69b5dfeeb8310469a06ee (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.156529998 -0500 EST m=+10.357743912 container restart 165acae01c4497fd7cb0b0fc9047f425e41b20a3cb23ada8c49b21a9498ba9c7 (image=localhost/podman-pause:5.3.1-1733097600, name=0ee319c08104-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.134926929 -0500 EST m=+10.336140982 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:44:21 managed-node2 systemd[1]: Started libpod-165acae01c4497fd7cb0b0fc9047f425e41b20a3cb23ada8c49b21a9498ba9c7.scope - libcrun container. ░░ Subject: A start job for unit libpod-165acae01c4497fd7cb0b0fc9047f425e41b20a3cb23ada8c49b21a9498ba9c7.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-165acae01c4497fd7cb0b0fc9047f425e41b20a3cb23ada8c49b21a9498ba9c7.scope has finished successfully. ░░ ░░ The job identifier is 2360. 
Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.214137148 -0500 EST m=+10.415351102 container init 165acae01c4497fd7cb0b0fc9047f425e41b20a3cb23ada8c49b21a9498ba9c7 (image=localhost/podman-pause:5.3.1-1733097600, name=0ee319c08104-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.216467545 -0500 EST m=+10.417681626 container start 165acae01c4497fd7cb0b0fc9047f425e41b20a3cb23ada8c49b21a9498ba9c7 (image=localhost/podman-pause:5.3.1-1733097600, name=0ee319c08104-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 06 13:44:21 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 06 13:44:21 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 06 13:44:21 managed-node2 kernel: veth0: entered allmulticast mode Jan 06 13:44:21 managed-node2 kernel: veth0: entered promiscuous mode Jan 06 13:44:21 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 06 13:44:21 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.2351] device (podman1): carrier: link connected Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.2354] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.2381] device (veth0): carrier: link connected Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.2384] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 06 13:44:21 managed-node2 (udev-worker)[27176]: Network interface NamePolicy= disabled on kernel command line. Jan 06 13:44:21 managed-node2 (udev-worker)[27175]: Network interface NamePolicy= disabled on kernel command line. Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3087] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3092] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3098] device (podman1): Activation: starting connection 'podman1' (ac6ed1cb-03a8-480b-b125-4c539910d6cd) Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3099] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3101] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3103] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3105] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 06 13:44:21 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2367. 
Jan 06 13:44:21 managed-node2 systemd[1]: Started run-r1e1cd8c3d5fa416f8897d998b9736d01.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r1e1cd8c3d5fa416f8897d998b9736d01.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r1e1cd8c3d5fa416f8897d998b9736d01.scope has finished successfully. ░░ ░░ The job identifier is 2446. Jan 06 13:44:21 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2367. Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3382] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3386] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 06 13:44:21 managed-node2 NetworkManager[778]: [1736189061.3391] device (podman1): Activation: successful, device activated. Jan 06 13:44:21 managed-node2 systemd[1]: Started libpod-aefe2eb0d74a6b4ca9fa77a3ebb589a318080b781017be1c87ad3b74ba339474.scope - libcrun container. ░░ Subject: A start job for unit libpod-aefe2eb0d74a6b4ca9fa77a3ebb589a318080b781017be1c87ad3b74ba339474.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-aefe2eb0d74a6b4ca9fa77a3ebb589a318080b781017be1c87ad3b74ba339474.scope has finished successfully. ░░ ░░ The job identifier is 2452. Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.370994083 -0500 EST m=+10.572208036 container init aefe2eb0d74a6b4ca9fa77a3ebb589a318080b781017be1c87ad3b74ba339474 (image=localhost/podman-pause:5.3.1-1733097600, name=ce8aeae07491-infra, pod_id=ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.37325424 -0500 EST m=+10.574468251 container start aefe2eb0d74a6b4ca9fa77a3ebb589a318080b781017be1c87ad3b74ba339474 (image=localhost/podman-pause:5.3.1-1733097600, name=ce8aeae07491-infra, pod_id=ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 06 13:44:21 managed-node2 systemd[1]: Started libpod-fb8f4012b2f4fa0f8fa47d0aa0512f5ee919ea91cca69b5dfeeb8310469a06ee.scope - libcrun container. ░░ Subject: A start job for unit libpod-fb8f4012b2f4fa0f8fa47d0aa0512f5ee919ea91cca69b5dfeeb8310469a06ee.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-fb8f4012b2f4fa0f8fa47d0aa0512f5ee919ea91cca69b5dfeeb8310469a06ee.scope has finished successfully. ░░ ░░ The job identifier is 2459. 
Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.412907494 -0500 EST m=+10.614122931 container init fb8f4012b2f4fa0f8fa47d0aa0512f5ee919ea91cca69b5dfeeb8310469a06ee (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.415223019 -0500 EST m=+10.616437096 container start fb8f4012b2f4fa0f8fa47d0aa0512f5ee919ea91cca69b5dfeeb8310469a06ee (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 06 13:44:21 managed-node2 podman[27122]: 2025-01-06 13:44:21.419466502 -0500 EST m=+10.620680409 pod start ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374 (image=, name=httpd2) Jan 06 13:44:21 managed-node2 podman[27122]: Pod: Jan 06 13:44:21 managed-node2 podman[27122]: ce8aeae07491ef171e2419d5fdccb2a7217a47b6016d5672a7934fc25f31d374 Jan 06 13:44:21 managed-node2 podman[27122]: Container: Jan 06 13:44:21 managed-node2 podman[27122]: fb8f4012b2f4fa0f8fa47d0aa0512f5ee919ea91cca69b5dfeeb8310469a06ee Jan 06 13:44:21 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2267. 
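The journal above shows the role switching the httpd2.yml workload over to the podman-kube@ template unit: a daemon-reload, enable, and start of podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service. As a rough standalone sketch (not the role's actual task file), the same enable-and-start step could be expressed with the modules the log shows being invoked; the unit name is derived with systemd-escape exactly as the log shows for httpd3.yml below, and the __kube_unit variable and task names here are illustrative only:

    - name: Compute the escaped template unit name for the kube file (illustrative)
      ansible.builtin.command:
        cmd: systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
      register: __kube_unit
      changed_when: false

    - name: Enable and start the podman-kube template instance (illustrative)
      ansible.builtin.systemd:
        name: "{{ __kube_unit.stdout }}"
        scope: system
        daemon_reload: true
        enabled: true
        state: started

Once the unit starts, the journal records the original pod d2912d1edd53... being stopped and removed and a new pod ce8aeae07491... created with the PODMAN_SYSTEMD_UNIT label, i.e. the same kube file now running under the systemd unit.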
Jan 06 13:44:22 managed-node2 python3.12[27356]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:44:23 managed-node2 python3.12[27489]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:24 managed-node2 python3.12[27621]: ansible-file Invoked with path=/tmp/lsr__vern8jf_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:24 managed-node2 python3.12[27752]: ansible-file Invoked with path=/tmp/lsr__vern8jf_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:26 managed-node2 podman[27914]: 2025-01-06 13:44:26.3106078 -0500 EST m=+0.923637675 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:44:26 managed-node2 python3.12[28058]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:44:27 managed-node2 python3.12[28189]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:27 managed-node2 python3.12[28320]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 06 13:44:27 managed-node2 python3.12[28425]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736189067.3038528-16085-20041728166165/.source.yml _original_basename=.c71ej9ui follow=False checksum=c542a9083a848d56371807c0b096b9807e0cfe95 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:28 managed-node2 python3.12[28556]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 06 13:44:28 managed-node2 systemd[1]: Created slice machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice - cgroup machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice. ░░ Subject: A start job for unit machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice has finished successfully. ░░ ░░ The job identifier is 2466. Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.38509505 -0500 EST m=+0.057813678 container create e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1 (image=localhost/podman-pause:5.3.1-1733097600, name=17eccda7a62c-infra, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, io.buildah.version=1.38.0) Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.389476346 -0500 EST m=+0.062194909 pod create 17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff (image=, name=httpd3) Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.412792644 -0500 EST m=+0.085511291 container create c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test) Jan 06 13:44:28 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 06 13:44:28 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 06 13:44:28 managed-node2 kernel: veth1: entered allmulticast mode Jan 06 13:44:28 managed-node2 kernel: veth1: entered promiscuous mode Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.392276563 -0500 EST m=+0.064995355 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:44:28 managed-node2 NetworkManager[778]: [1736189068.4449] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jan 06 13:44:28 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 06 13:44:28 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 06 13:44:28 managed-node2 NetworkManager[778]: [1736189068.4514] device (veth1): carrier: link connected Jan 06 13:44:28 managed-node2 (udev-worker)[28581]: Network interface NamePolicy= disabled on kernel command line. Jan 06 13:44:28 managed-node2 systemd[1]: Started libpod-conmon-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope. ░░ Subject: A start job for unit libpod-conmon-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope has finished successfully. ░░ ░░ The job identifier is 2473. Jan 06 13:44:28 managed-node2 systemd[1]: Started libpod-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope - libcrun container. 
░░ Subject: A start job for unit libpod-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope has finished successfully. ░░ ░░ The job identifier is 2480. Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.549054731 -0500 EST m=+0.221773388 container init e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1 (image=localhost/podman-pause:5.3.1-1733097600, name=17eccda7a62c-infra, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, io.buildah.version=1.38.0) Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.552930262 -0500 EST m=+0.225648879 container start e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1 (image=localhost/podman-pause:5.3.1-1733097600, name=17eccda7a62c-infra, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, io.buildah.version=1.38.0) Jan 06 13:44:28 managed-node2 systemd[1]: Started libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope. ░░ Subject: A start job for unit libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has finished successfully. ░░ ░░ The job identifier is 2487. Jan 06 13:44:28 managed-node2 systemd[1]: Started libpod-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope - libcrun container. ░░ Subject: A start job for unit libpod-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has finished successfully. ░░ ░░ The job identifier is 2494. 
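The podman-kube@ instance name used throughout this run is derived from the kube file path by the systemd-escape call logged above; a minimal sketch of that derivation, with the path and template taken directly from the log:

    # Escape the kube YAML path into a template instance name (no --path, so the
    # leading slash becomes a leading dash and '-' is escaped as \x2d):
    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service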
Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.677912428 -0500 EST m=+0.350631094 container init c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.680439378 -0500 EST m=+0.353158027 container start c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 06 13:44:28 managed-node2 podman[28563]: 2025-01-06 13:44:28.688765569 -0500 EST m=+0.361484129 pod start 17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff (image=, name=httpd3) Jan 06 13:44:29 managed-node2 python3.12[28743]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:44:29 managed-node2 systemd[1]: Reload requested from client PID 28744 ('systemctl') (unit session-5.scope)... Jan 06 13:44:29 managed-node2 systemd[1]: Reloading... Jan 06 13:44:29 managed-node2 systemd[1]: Reloading finished in 211 ms. Jan 06 13:44:30 managed-node2 python3.12[28930]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 06 13:44:30 managed-node2 systemd[1]: Reload requested from client PID 28933 ('systemctl') (unit session-5.scope)... Jan 06 13:44:30 managed-node2 systemd[1]: Reloading... Jan 06 13:44:30 managed-node2 systemd[1]: Reloading finished in 214 ms. Jan 06 13:44:30 managed-node2 python3.12[29119]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 06 13:44:31 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2501. Jan 06 13:44:31 managed-node2 podman[29123]: 2025-01-06 13:44:31.055659005 -0500 EST m=+0.024047657 pod stop 17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff (image=, name=httpd3) Jan 06 13:44:31 managed-node2 systemd[1]: libpod-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope has successfully entered the 'dead' state. 
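The three ansible-systemd invocations above (daemon_reload=True, enabled=True, state=started) amount to roughly the following manual steps; the single quotes keep the literal \x2d in the escaped unit name:

    systemctl daemon-reload
    systemctl enable 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'
    systemctl start 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'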
Jan 06 13:44:31 managed-node2 podman[29123]: 2025-01-06 13:44:31.083590787 -0500 EST m=+0.051979320 container died e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1 (image=localhost/podman-pause:5.3.1-1733097600, name=17eccda7a62c-infra, io.buildah.version=1.38.0) Jan 06 13:44:31 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 06 13:44:31 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jan 06 13:44:31 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jan 06 13:44:31 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 06 13:44:31 managed-node2 systemd[1]: run-netns-netns\x2d8f26f9ee\x2d7bac\x2d9495\x2d881b\x2d97fbc84941e6.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d8f26f9ee\x2d7bac\x2d9495\x2d881b\x2d97fbc84941e6.mount has successfully entered the 'dead' state. Jan 06 13:44:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1-userdata-shm.mount has successfully entered the 'dead' state. Jan 06 13:44:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay-5ccb9ce65ffda3bebf0d141a06875eecd236dd71df7c84d00c5dab9f80175e2d-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-5ccb9ce65ffda3bebf0d141a06875eecd236dd71df7c84d00c5dab9f80175e2d-merged.mount has successfully entered the 'dead' state. Jan 06 13:44:31 managed-node2 podman[29123]: 2025-01-06 13:44:31.157700389 -0500 EST m=+0.126088918 container cleanup e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1 (image=localhost/podman-pause:5.3.1-1733097600, name=17eccda7a62c-infra, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, io.buildah.version=1.38.0) Jan 06 13:44:31 managed-node2 systemd[1]: libpod-conmon-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1.scope has successfully entered the 'dead' state. Jan 06 13:44:31 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 06 13:44:41 managed-node2 podman[29123]: time="2025-01-06T13:44:41-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jan 06 13:44:41 managed-node2 systemd[1]: libpod-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has successfully entered the 'dead' state. 
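The SIGKILL warning above is podman giving up after the default 10-second stop timeout because the test container does not exit on SIGTERM; when stopping such a container by hand, the grace period can be shortened, for example:

    # -t/--time sets how many seconds to wait after SIGTERM before sending SIGKILL
    podman stop -t 2 httpd3-httpd3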
Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.087538092 -0500 EST m=+10.055926735 container died c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 06 13:44:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay-af860106bc4e508e7eab94e13270ecdfb0acb07ce2ff9ef6afb1692ff4de0f3e-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-af860106bc4e508e7eab94e13270ecdfb0acb07ce2ff9ef6afb1692ff4de0f3e-merged.mount has successfully entered the 'dead' state. Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.120460879 -0500 EST m=+10.088849588 container cleanup c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 06 13:44:41 managed-node2 systemd[1]: Stopping libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope... ░░ Subject: A stop job for unit libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has begun execution. ░░ ░░ The job identifier is 2587. Jan 06 13:44:41 managed-node2 systemd[1]: libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has successfully entered the 'dead' state. Jan 06 13:44:41 managed-node2 systemd[1]: Stopped libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope. ░░ Subject: A stop job for unit libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f.scope has finished. ░░ ░░ The job identifier is 2587 and the job result is done. Jan 06 13:44:41 managed-node2 systemd[1]: Removed slice machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice - cgroup machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice. ░░ Subject: A stop job for unit machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice has finished. ░░ ░░ The job identifier is 2586 and the job result is done. 
Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.147443363 -0500 EST m=+10.115831893 container remove c42b6b16d50acc718e1d3bcf02e7e75249a5c04c7c9353bd29cf0ea4dbc8d59f (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.167695492 -0500 EST m=+10.136084036 container remove e6dc0d49464512fd17e79a0b50c8004c096e4b4a3ac35182e2abc7606fe805d1 (image=localhost/podman-pause:5.3.1-1733097600, name=17eccda7a62c-infra, pod_id=17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff, io.buildah.version=1.38.0) Jan 06 13:44:41 managed-node2 systemd[1]: machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice: Failed to open /run/systemd/transient/machine-libpod_pod_17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff.slice: No such file or directory Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.175559645 -0500 EST m=+10.143948145 pod remove 17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff (image=, name=httpd3) Jan 06 13:44:41 managed-node2 podman[29123]: Pods stopped: Jan 06 13:44:41 managed-node2 podman[29123]: 17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff Jan 06 13:44:41 managed-node2 podman[29123]: Pods removed: Jan 06 13:44:41 managed-node2 podman[29123]: 17eccda7a62ca7a19d4bd9d8f3cb9665e6b46d6b781b54170d2cf3755b50baff Jan 06 13:44:41 managed-node2 podman[29123]: Secrets removed: Jan 06 13:44:41 managed-node2 podman[29123]: Volumes removed: Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.196440323 -0500 EST m=+10.164828911 container create d161320bca5e4e22fb66f65654ddee10df25f1ff781884c4d6edec9fc7faa75d (image=localhost/podman-pause:5.3.1-1733097600, name=1345b7736131-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 06 13:44:41 managed-node2 systemd[1]: Created slice machine-libpod_pod_ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e.slice - cgroup machine-libpod_pod_ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e.slice. ░░ Subject: A start job for unit machine-libpod_pod_ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e.slice has finished successfully. ░░ ░░ The job identifier is 2588. 
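At this point the service start has torn down the ad-hoc httpd3 pod and is re-creating it, plus a dedicated service/pause container, under the podman-kube@ unit. A quick check of that state (valid once the start job below finishes), using only standard podman/systemctl commands:

    podman pod ps     # httpd3 should be listed again, with a new pod ID
    podman ps --pod   # containers shown together with the pod they belong to
    systemctl is-active 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'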
Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.228622367 -0500 EST m=+10.197010961 container create b6ada9a42cbef9423caa6b299ae1df1968dd3bdbd47b5795f4b38df4da019d0a (image=localhost/podman-pause:5.3.1-1733097600, name=ea72115d828a-infra, pod_id=ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.2331254 -0500 EST m=+10.201513913 pod create ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e (image=, name=httpd3) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.253855958 -0500 EST m=+10.222244559 container create 16cf55b7148e035d456f91df366a81e14c9b91fffc2b85a33a259d7f795da252 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e, app=test, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.254495394 -0500 EST m=+10.222884030 container restart d161320bca5e4e22fb66f65654ddee10df25f1ff781884c4d6edec9fc7faa75d (image=localhost/podman-pause:5.3.1-1733097600, name=1345b7736131-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 06 13:44:41 managed-node2 systemd[1]: Started libpod-d161320bca5e4e22fb66f65654ddee10df25f1ff781884c4d6edec9fc7faa75d.scope - libcrun container. ░░ Subject: A start job for unit libpod-d161320bca5e4e22fb66f65654ddee10df25f1ff781884c4d6edec9fc7faa75d.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-d161320bca5e4e22fb66f65654ddee10df25f1ff781884c4d6edec9fc7faa75d.scope has finished successfully. ░░ ░░ The job identifier is 2594. 
Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.235114487 -0500 EST m=+10.203503173 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.373154364 -0500 EST m=+10.341543067 container init d161320bca5e4e22fb66f65654ddee10df25f1ff781884c4d6edec9fc7faa75d (image=localhost/podman-pause:5.3.1-1733097600, name=1345b7736131-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.376446249 -0500 EST m=+10.344834984 container start d161320bca5e4e22fb66f65654ddee10df25f1ff781884c4d6edec9fc7faa75d (image=localhost/podman-pause:5.3.1-1733097600, name=1345b7736131-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 06 13:44:41 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 06 13:44:41 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 06 13:44:41 managed-node2 kernel: veth1: entered allmulticast mode Jan 06 13:44:41 managed-node2 kernel: veth1: entered promiscuous mode Jan 06 13:44:41 managed-node2 NetworkManager[778]: [1736189081.4038] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 06 13:44:41 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 06 13:44:41 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 06 13:44:41 managed-node2 NetworkManager[778]: [1736189081.4077] device (veth1): carrier: link connected Jan 06 13:44:41 managed-node2 (udev-worker)[29171]: Network interface NamePolicy= disabled on kernel command line. Jan 06 13:44:41 managed-node2 systemd[1]: Started libpod-b6ada9a42cbef9423caa6b299ae1df1968dd3bdbd47b5795f4b38df4da019d0a.scope - libcrun container. ░░ Subject: A start job for unit libpod-b6ada9a42cbef9423caa6b299ae1df1968dd3bdbd47b5795f4b38df4da019d0a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b6ada9a42cbef9423caa6b299ae1df1968dd3bdbd47b5795f4b38df4da019d0a.scope has finished successfully. ░░ ░░ The job identifier is 2601. Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.488335491 -0500 EST m=+10.456724084 container init b6ada9a42cbef9423caa6b299ae1df1968dd3bdbd47b5795f4b38df4da019d0a (image=localhost/podman-pause:5.3.1-1733097600, name=ea72115d828a-infra, pod_id=ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.490824849 -0500 EST m=+10.459213567 container start b6ada9a42cbef9423caa6b299ae1df1968dd3bdbd47b5795f4b38df4da019d0a (image=localhost/podman-pause:5.3.1-1733097600, name=ea72115d828a-infra, pod_id=ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 06 13:44:41 managed-node2 systemd[1]: Started libpod-16cf55b7148e035d456f91df366a81e14c9b91fffc2b85a33a259d7f795da252.scope - libcrun container. 
░░ Subject: A start job for unit libpod-16cf55b7148e035d456f91df366a81e14c9b91fffc2b85a33a259d7f795da252.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-16cf55b7148e035d456f91df366a81e14c9b91fffc2b85a33a259d7f795da252.scope has finished successfully. ░░ ░░ The job identifier is 2608. Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.538300122 -0500 EST m=+10.506688700 container init 16cf55b7148e035d456f91df366a81e14c9b91fffc2b85a33a259d7f795da252 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.540860215 -0500 EST m=+10.509248880 container start 16cf55b7148e035d456f91df366a81e14c9b91fffc2b85a33a259d7f795da252 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 06 13:44:41 managed-node2 podman[29123]: 2025-01-06 13:44:41.545186447 -0500 EST m=+10.513574974 pod start ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e (image=, name=httpd3) Jan 06 13:44:41 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2501. Jan 06 13:44:41 managed-node2 podman[29123]: Pod: Jan 06 13:44:41 managed-node2 podman[29123]: ea72115d828abb22edf2284ebf59ec38d00b4c22e47e369e0e41325474511e9e Jan 06 13:44:41 managed-node2 podman[29123]: Container: Jan 06 13:44:41 managed-node2 podman[29123]: 16cf55b7148e035d456f91df366a81e14c9b91fffc2b85a33a259d7f795da252 Jan 06 13:44:42 managed-node2 sudo[29375]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gbcivvlpeihyzdeoqfznyjspqzsjfqsr ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189082.0818632-16561-20375065442829/AnsiballZ_command.py' Jan 06 13:44:42 managed-node2 sudo[29375]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29375) opened. Jan 06 13:44:42 managed-node2 sudo[29375]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 06 13:44:42 managed-node2 python3.12[29378]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:42 managed-node2 systemd[23127]: Started podman-29385.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. Jan 06 13:44:42 managed-node2 sudo[29375]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 06 13:44:43 managed-node2 python3.12[29526]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:43 managed-node2 python3.12[29665]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:44 managed-node2 sudo[29846]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-seggffihjtoqotwqovbfzkzaazfjugkp ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189083.763046-16637-258797416598362/AnsiballZ_command.py' Jan 06 13:44:44 managed-node2 sudo[29846]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29846) opened. Jan 06 13:44:44 managed-node2 sudo[29846]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 06 13:44:44 managed-node2 python3.12[29849]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:44 managed-node2 sudo[29846]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 06 13:44:44 managed-node2 python3.12[29983]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:44 managed-node2 python3.12[30117]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:45 managed-node2 python3.12[30251]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None 
setype=None attributes=None Jan 06 13:44:46 managed-node2 python3.12[30384]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:46 managed-node2 python3.12[30515]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:47 managed-node2 python3.12[30647]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:47 managed-node2 python3.12[30778]: ansible-file Invoked with path=/tmp/lsr__vern8jf_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:44:51 managed-node2 python3.12[30952]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 06 13:44:52 managed-node2 python3.12[31125]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:44:53 managed-node2 python3.12[31256]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:44:55 managed-node2 python3.12[31518]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:44:57 managed-node2 python3.12[31655]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 06 13:44:58 managed-node2 python3.12[31787]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:00 managed-node2 python3.12[31920]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:02 managed-node2 python3.12[32053]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False 
_original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:03 managed-node2 python3.12[32184]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 06 13:45:03 managed-node2 python3.12[32289]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736189103.0489454-17620-77377462417765/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:04 managed-node2 python3.12[32420]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:45:04 managed-node2 systemd[1]: Reload requested from client PID 32421 ('systemctl') (unit session-5.scope)... Jan 06 13:45:04 managed-node2 systemd[1]: Reloading... Jan 06 13:45:04 managed-node2 systemd[1]: Reloading finished in 226 ms. Jan 06 13:45:05 managed-node2 python3.12[32606]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 06 13:45:05 managed-node2 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2615. Jan 06 13:45:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice - cgroup machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice. ░░ Subject: A start job for unit machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice has finished successfully. ░░ ░░ The job identifier is 2699. 
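quadlet-pod-pod.pod is copied into /etc/containers/systemd/ above and, after the daemon reload, quadlet generates quadlet-pod-pod-pod.service from it. The file's real contents are not logged; a hypothetical minimal version consistent with the pod name "quadlet-pod" seen below would be:

    cat > /etc/containers/systemd/quadlet-pod-pod.pod <<'EOF'
    [Pod]
    PodName=quadlet-pod
    EOF
    systemctl daemon-reload                      # regenerates quadlet-pod-pod-pod.service
    systemctl start quadlet-pod-pod-pod.service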
Jan 06 13:45:05 managed-node2 podman[32610]: 2025-01-06 13:45:05.696405931 -0500 EST m=+0.060183546 container create a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 06 13:45:05 managed-node2 podman[32610]: 2025-01-06 13:45:05.700976209 -0500 EST m=+0.064753698 pod create 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 (image=, name=quadlet-pod) Jan 06 13:45:05 managed-node2 quadlet-pod-pod-pod[32610]: 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.7582] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jan 06 13:45:05 managed-node2 kernel: podman0: port 1(veth2) entered blocking state Jan 06 13:45:05 managed-node2 kernel: podman0: port 1(veth2) entered disabled state Jan 06 13:45:05 managed-node2 kernel: veth2: entered allmulticast mode Jan 06 13:45:05 managed-node2 kernel: veth2: entered promiscuous mode Jan 06 13:45:05 managed-node2 kernel: podman0: port 1(veth2) entered blocking state Jan 06 13:45:05 managed-node2 kernel: podman0: port 1(veth2) entered forwarding state Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.7742] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.7767] device (veth2): carrier: link connected Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.7776] device (podman0): carrier: link connected Jan 06 13:45:05 managed-node2 (udev-worker)[32633]: Network interface NamePolicy= disabled on kernel command line. Jan 06 13:45:05 managed-node2 (udev-worker)[32634]: Network interface NamePolicy= disabled on kernel command line. Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8484] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8489] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8494] device (podman0): Activation: starting connection 'podman0' (35dc9bfe-d00e-4c7a-948c-bb2513e43537) Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8496] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8497] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8500] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8502] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 06 13:45:05 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2706. 
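The podman0 bridge and veth2 activity above is podman's default network being brought up for the new pod and handed to NetworkManager as an externally managed device; it can be inspected with, for example:

    podman network ls
    ip -br link show podman0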
Jan 06 13:45:05 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2706. Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8750] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8760] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 06 13:45:05 managed-node2 NetworkManager[778]: [1736189105.8765] device (podman0): Activation: successful, device activated. Jan 06 13:45:05 managed-node2 systemd[1]: Started libpod-a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524.scope - libcrun container. ░░ Subject: A start job for unit libpod-a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524.scope has finished successfully. ░░ ░░ The job identifier is 2785. Jan 06 13:45:05 managed-node2 podman[32619]: 2025-01-06 13:45:05.918181829 -0500 EST m=+0.199020822 container init a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 06 13:45:05 managed-node2 podman[32619]: 2025-01-06 13:45:05.921319074 -0500 EST m=+0.202158412 container start a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 06 13:45:05 managed-node2 podman[32619]: 2025-01-06 13:45:05.926561452 -0500 EST m=+0.207400415 pod start 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 (image=, name=quadlet-pod) Jan 06 13:45:05 managed-node2 quadlet-pod-pod-pod[32619]: quadlet-pod Jan 06 13:45:05 managed-node2 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2615. 
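With quadlet-pod-pod-pod.service started, the pod can be verified the same way the test checks the kube pods earlier in this log; the inspect format should report Running once the start job has finished:

    systemctl is-active quadlet-pod-pod-pod.service
    podman pod inspect quadlet-pod --format '{{.State}}'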
Jan 06 13:45:07 managed-node2 python3.12[32806]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:10 managed-node2 podman[32968]: 2025-01-06 13:45:10.426541149 -0500 EST m=+1.578756448 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:45:10 managed-node2 python3.12[33114]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:11 managed-node2 python3.12[33245]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 06 13:45:11 managed-node2 python3.12[33350]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736189111.1390615-17972-280895023868873/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:12 managed-node2 python3.12[33481]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:45:12 managed-node2 systemd[1]: Reload requested from client PID 33482 ('systemctl') (unit session-5.scope)... Jan 06 13:45:12 managed-node2 systemd[1]: Reloading... Jan 06 13:45:12 managed-node2 systemd[1]: Reloading finished in 225 ms. Jan 06 13:45:13 managed-node2 python3.12[33668]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 06 13:45:13 managed-node2 systemd[1]: Starting quadlet-pod-container.service... ░░ Subject: A start job for unit quadlet-pod-container.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has begun execution. ░░ ░░ The job identifier is 2792. 
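quadlet-pod-container.container is installed the same way; its contents are not logged either, but a hypothetical minimal file matching the container name, image, and pod membership recorded below would look like this (Pod= ties the container unit to the .pod unit above):

    cat > /etc/containers/systemd/quadlet-pod-container.container <<'EOF'
    [Container]
    ContainerName=quadlet-pod-container
    Image=quay.io/libpod/testimage:20210610
    Pod=quadlet-pod-pod.pod
    EOF
    systemctl daemon-reload
    systemctl start quadlet-pod-container.service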
Jan 06 13:45:13 managed-node2 podman[33672]: 2025-01-06 13:45:13.307274977 -0500 EST m=+0.046720482 container create 5b47883bef44d3eb9b4823d123502276bc4b13bb88330fc9ef67e816c6c5db72 (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 06 13:45:13 managed-node2 podman[33672]: 2025-01-06 13:45:13.35734463 -0500 EST m=+0.096790195 container init 5b47883bef44d3eb9b4823d123502276bc4b13bb88330fc9ef67e816c6c5db72 (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z) Jan 06 13:45:13 managed-node2 podman[33672]: 2025-01-06 13:45:13.3599585 -0500 EST m=+0.099404302 container start 5b47883bef44d3eb9b4823d123502276bc4b13bb88330fc9ef67e816c6c5db72 (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z) Jan 06 13:45:13 managed-node2 systemd[1]: Started quadlet-pod-container.service. ░░ Subject: A start job for unit quadlet-pod-container.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has finished successfully. ░░ ░░ The job identifier is 2792. Jan 06 13:45:13 managed-node2 quadlet-pod-container[33672]: 5b47883bef44d3eb9b4823d123502276bc4b13bb88330fc9ef67e816c6c5db72 Jan 06 13:45:13 managed-node2 podman[33672]: 2025-01-06 13:45:13.282457438 -0500 EST m=+0.021903080 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 06 13:45:14 managed-node2 python3.12[33817]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:15 managed-node2 python3.12[33949]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:15 managed-node2 python3.12[34081]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:15 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Jan 06 13:45:16 managed-node2 python3.12[34221]: ansible-user Invoked with name=user_quadlet_pod uid=2223 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 06 13:45:16 managed-node2 useradd[34223]: new group: name=user_quadlet_pod, GID=2223 Jan 06 13:45:16 managed-node2 useradd[34223]: new user: name=user_quadlet_pod, UID=2223, GID=2223, home=/home/user_quadlet_pod, shell=/bin/bash, from=/dev/pts/0 Jan 06 13:45:16 managed-node2 rsyslogd[661]: imjournal: journal files changed, reloading... [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Jan 06 13:45:18 managed-node2 python3.12[34486]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:19 managed-node2 python3.12[34623]: ansible-getent Invoked with database=passwd key=user_quadlet_pod fail_key=False service=None split=None Jan 06 13:45:20 managed-node2 python3.12[34755]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:20 managed-node2 python3.12[34888]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:21 managed-node2 python3.12[35020]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:24 managed-node2 python3.12[35152]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:24 managed-node2 python3.12[35285]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:25 managed-node2 python3.12[35417]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:26 managed-node2 python3.12[35549]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 06 13:45:26 managed-node2 systemd[1]: Created slice user-2223.slice - User Slice of UID 
2223. ░░ Subject: A start job for unit user-2223.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-2223.slice has finished successfully. ░░ ░░ The job identifier is 2878. Jan 06 13:45:26 managed-node2 systemd[1]: Starting user-runtime-dir@2223.service - User Runtime Directory /run/user/2223... ░░ Subject: A start job for unit user-runtime-dir@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@2223.service has begun execution. ░░ ░░ The job identifier is 2877. Jan 06 13:45:26 managed-node2 systemd[1]: Finished user-runtime-dir@2223.service - User Runtime Directory /run/user/2223. ░░ Subject: A start job for unit user-runtime-dir@2223.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@2223.service has finished successfully. ░░ ░░ The job identifier is 2877. Jan 06 13:45:26 managed-node2 systemd[1]: Starting user@2223.service - User Manager for UID 2223... ░░ Subject: A start job for unit user@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@2223.service has begun execution. ░░ ░░ The job identifier is 2957. Jan 06 13:45:26 managed-node2 systemd-logind[662]: New session 7 of user user_quadlet_pod. ░░ Subject: A new session 7 has been created for user user_quadlet_pod ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user user_quadlet_pod. ░░ ░░ The leading process of the session is 35553. Jan 06 13:45:26 managed-node2 (systemd)[35553]: pam_unix(systemd-user:session): session opened for user user_quadlet_pod(uid=2223) by user_quadlet_pod(uid=0) Jan 06 13:45:26 managed-node2 systemd[35553]: Queued start job for default target default.target. Jan 06 13:45:26 managed-node2 systemd[35553]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 06 13:45:26 managed-node2 systemd[35553]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 06 13:45:26 managed-node2 systemd[35553]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 06 13:45:26 managed-node2 systemd[35553]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 06 13:45:26 managed-node2 systemd[35553]: Reached target timers.target - Timers. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 06 13:45:26 managed-node2 systemd[35553]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 06 13:45:26 managed-node2 systemd[35553]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 8. Jan 06 13:45:26 managed-node2 systemd[35553]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 06 13:45:26 managed-node2 systemd[35553]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 06 13:45:26 managed-node2 systemd[35553]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 06 13:45:26 managed-node2 systemd[35553]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 06 13:45:26 managed-node2 systemd[35553]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 06 13:45:26 managed-node2 systemd[35553]: Startup finished in 92ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 2223 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 92932 microseconds. Jan 06 13:45:26 managed-node2 systemd[1]: Started user@2223.service - User Manager for UID 2223. ░░ Subject: A start job for unit user@2223.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@2223.service has finished successfully. ░░ ░░ The job identifier is 2957. 
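The rootless part of the test repeats the setup for user_quadlet_pod: the subid ranges allocated by useradd are checked, and lingering is enabled so the user manager started above stays up without a login session. Condensed from the invocations in this log:

    getsubids user_quadlet_pod               # subuid range from /etc/subuid
    getsubids -g user_quadlet_pod            # subgid range from /etc/subgid
    loginctl enable-linger user_quadlet_pod
    ls /var/lib/systemd/linger/              # the creates= guard above checks for this file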
Jan 06 13:45:27 managed-node2 python3.12[35699]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:27 managed-node2 python3.12[35830]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 06 13:45:27 managed-node2 python3.12[35935]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736189127.2591572-18736-229058402969634/.source.pod dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:28 managed-node2 sudo[36108]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dbjtqzsywkpezfgxflofsmrvkwybnhha ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189128.017088-18784-179594950140291/AnsiballZ_systemd.py' Jan 06 13:45:28 managed-node2 sudo[36108]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-36108) opened. Jan 06 13:45:28 managed-node2 sudo[36108]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:45:28 managed-node2 python3.12[36111]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:45:28 managed-node2 python3.12[36111]: ansible-systemd [WARNING] Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually Jan 06 13:45:28 managed-node2 systemd[35553]: Reload requested from client PID 36112 ('systemctl')... Jan 06 13:45:28 managed-node2 systemd[35553]: Reloading... Jan 06 13:45:28 managed-node2 systemd[35553]: Reloading finished in 58 ms. Jan 06 13:45:28 managed-node2 sudo[36108]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:45:28 managed-node2 sudo[36295]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qtaqvrdaabsifsxlfhuppvrfnrgkkatm ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189128.709347-18814-253883249268501/AnsiballZ_systemd.py' Jan 06 13:45:28 managed-node2 sudo[36295]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-36295) opened. 
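The user-scope reload and the service start that follow are run through sudo with XDG_RUNTIME_DIR pointed at the user's runtime directory, mirroring the pattern in the sudo lines above; roughly:

    sudo -u user_quadlet_pod /bin/sh -c 'XDG_RUNTIME_DIR=/run/user/2223 systemctl --user daemon-reload'
    sudo -u user_quadlet_pod /bin/sh -c 'XDG_RUNTIME_DIR=/run/user/2223 systemctl --user start quadlet-pod-pod-pod.service'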
Jan 06 13:45:28 managed-node2 sudo[36295]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:45:29 managed-node2 python3.12[36298]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 06 13:45:29 managed-node2 systemd[35553]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user.... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 25. Jan 06 13:45:29 managed-node2 sh[36302]: active Jan 06 13:45:29 managed-node2 systemd[35553]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 25. Jan 06 13:45:29 managed-node2 systemd[35553]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 06 13:45:29 managed-node2 systemd[35553]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 06 13:45:29 managed-node2 systemd[35553]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 26. Jan 06 13:45:29 managed-node2 dbus-broker-launch[36368]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 06 13:45:29 managed-node2 dbus-broker-launch[36368]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 06 13:45:29 managed-node2 systemd[35553]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 26. Jan 06 13:45:29 managed-node2 dbus-broker-launch[36368]: Ready Jan 06 13:45:29 managed-node2 systemd[35553]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 33. Jan 06 13:45:29 managed-node2 systemd[35553]: Created slice user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice - cgroup user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. 
░░ ░░ The job identifier is 32. Jan 06 13:45:29 managed-node2 quadlet-pod-pod-pod[36311]: ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63 Jan 06 13:45:29 managed-node2 systemd[35553]: Started podman-pause-f492329d.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 36. Jan 06 13:45:29 managed-node2 systemd[35553]: Started libpod-45f73cfbd704c0598973d807d66afa1da95af9a0cbdbe2fc017535243aa7472b.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 40. Jan 06 13:45:29 managed-node2 quadlet-pod-pod-pod[36370]: quadlet-pod Jan 06 13:45:29 managed-node2 systemd[35553]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 06 13:45:29 managed-node2 sudo[36295]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:45:30 managed-node2 python3.12[36526]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:31 managed-node2 python3.12[36659]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:31 managed-node2 python3.12[36791]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:33 managed-node2 python3.12[36923]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 06 13:45:34 managed-node2 sudo[37096]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tbjvrugszrxxhkoclpothcpvfuiougug ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189133.808137-19023-12806006126743/AnsiballZ_podman_image.py' Jan 06 13:45:34 managed-node2 sudo[37096]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-37096) opened. Jan 06 13:45:34 managed-node2 sudo[37096]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:45:34 managed-node2 systemd[35553]: Started podman-37100.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 45. Jan 06 13:45:34 managed-node2 systemd[35553]: Started podman-37107.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 49. Jan 06 13:45:35 managed-node2 systemd[35553]: Started podman-37131.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 53. Jan 06 13:45:35 managed-node2 sudo[37096]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:45:35 managed-node2 python3.12[37269]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:36 managed-node2 python3.12[37400]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 06 13:45:36 managed-node2 python3.12[37505]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736189136.065039-19111-268270576847091/.source.container dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:45:37 managed-node2 sudo[37678]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gxsaysaegsehhjeysmvcelyptdczqevt ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189136.824838-19158-232368784058642/AnsiballZ_systemd.py' Jan 06 13:45:37 managed-node2 sudo[37678]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-37678) opened. Jan 06 13:45:37 managed-node2 sudo[37678]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:45:37 managed-node2 python3.12[37681]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:45:37 managed-node2 systemd[35553]: Reload requested from client PID 37682 ('systemctl')... Jan 06 13:45:37 managed-node2 systemd[35553]: Reloading... Jan 06 13:45:37 managed-node2 systemd[35553]: Reloading finished in 66 ms. Jan 06 13:45:37 managed-node2 sudo[37678]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:45:37 managed-node2 sudo[37864]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eoxajhphrakakarrpuyyeilxkgltvhkw ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189137.4941432-19196-277066607543087/AnsiballZ_systemd.py' Jan 06 13:45:37 managed-node2 sudo[37864]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-37864) opened. 
Jan 06 13:45:37 managed-node2 sudo[37864]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:45:37 managed-node2 python3.12[37867]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 06 13:45:37 managed-node2 systemd[35553]: Starting quadlet-pod-container.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 57. Jan 06 13:45:38 managed-node2 systemd[35553]: Started quadlet-pod-container.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 57. Jan 06 13:45:38 managed-node2 quadlet-pod-container[37870]: d57b38b53b0f8174d24608d822b530e1f6592020fa40efe38453a4da416d44f1 Jan 06 13:45:38 managed-node2 sudo[37864]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:45:38 managed-node2 python3.12[38015]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:39 managed-node2 python3.12[38147]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:39 managed-node2 sudo[38321]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ssviuvfjsmmuceqkaffcmdggmbpnktyh ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189139.214725-19285-40525634089478/AnsiballZ_command.py' Jan 06 13:45:39 managed-node2 sudo[38321]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-38321) opened. Jan 06 13:45:39 managed-node2 sudo[38321]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:45:39 managed-node2 python3.12[38324]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:39 managed-node2 systemd[35553]: Started podman-38325.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
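As with the pod file, the contents of quadlet-pod-container.container are not logged. A minimal sketch that matches what the log shows (a container named quadlet-pod-container running inside the quadlet-pod pod) follows; the Image= value is a placeholder assumption, since the actual image pulled by the role is not visible here.

    # ~/.config/containers/systemd/quadlet-pod-container.container (assumed minimal content)
    [Container]
    ContainerName=quadlet-pod-container
    Image=quay.io/libpod/testimage:20210610
    Pod=quadlet-pod-pod.pod

    [Install]
    WantedBy=default.target

The Pod= reference is what ties the generated quadlet-pod-container.service to quadlet-pod-pod-pod.service, and the `podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}'` call just above verifies that the container joined the pod.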
Jan 06 13:45:39 managed-node2 sudo[38321]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:45:40 managed-node2 python3.12[38463]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:42 managed-node2 python3.12[38727]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:43 managed-node2 python3.12[38864]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:43 managed-node2 python3.12[38997]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:44 managed-node2 python3.12[39129]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:46 managed-node2 python3.12[39261]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:47 managed-node2 python3.12[39394]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:47 managed-node2 python3.12[39526]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:45:48 managed-node2 python3.12[39658]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:45:49 managed-node2 sudo[39833]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jzdhlctabsagfjzwpdrexfhwlgxkiivz ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189148.8269289-19708-272937034553328/AnsiballZ_systemd.py' Jan 06 13:45:49 managed-node2 sudo[39833]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-39833) opened. Jan 06 13:45:49 managed-node2 sudo[39833]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:45:49 managed-node2 python3.12[39836]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 06 13:45:49 managed-node2 systemd[35553]: Reload requested from client PID 39839 ('systemctl')... Jan 06 13:45:49 managed-node2 systemd[35553]: Reloading... Jan 06 13:45:49 managed-node2 systemd[35553]: Reloading finished in 65 ms. Jan 06 13:45:49 managed-node2 systemd[35553]: Stopping quadlet-pod-container.service... 
░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 75. Jan 06 13:45:59 managed-node2 quadlet-pod-container[39851]: time="2025-01-06T13:45:59-05:00" level=warning msg="StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL" Jan 06 13:45:59 managed-node2 quadlet-pod-container[39851]: d57b38b53b0f8174d24608d822b530e1f6592020fa40efe38453a4da416d44f1 Jan 06 13:45:59 managed-node2 systemd[35553]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit UNIT has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 137. Jan 06 13:45:59 managed-node2 systemd[35553]: Removed slice user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice - cgroup user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 76 and the job result is done. Jan 06 13:45:59 managed-node2 systemd[35553]: user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice: No such file or directory Jan 06 13:45:59 managed-node2 systemd[35553]: quadlet-pod-container.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'exit-code'. Jan 06 13:45:59 managed-node2 systemd[35553]: Stopped quadlet-pod-container.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 75 and the job result is done. 
Jan 06 13:45:59 managed-node2 systemd[35553]: user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice: No such file or directory Jan 06 13:45:59 managed-node2 quadlet-pod-pod-pod[39883]: quadlet-pod Jan 06 13:45:59 managed-node2 sudo[39833]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:45:59 managed-node2 systemd[35553]: user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63.slice: No such file or directory Jan 06 13:45:59 managed-node2 quadlet-pod-pod-pod[39899]: ee1aa9ed4b4dea627a3d833cd7d20d16c5f47ae6d4d7a0f824af071a396c9a63 Jan 06 13:46:00 managed-node2 python3.12[40040]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:46:01 managed-node2 python3.12[40304]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:46:01 managed-node2 sudo[40477]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-opguczyjdymgqtltgkeaolqkcxxappqw ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189161.3444223-20072-44474249135958/AnsiballZ_systemd.py' Jan 06 13:46:01 managed-node2 sudo[40477]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-40477) opened. Jan 06 13:46:01 managed-node2 sudo[40477]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:01 managed-node2 python3.12[40480]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:46:01 managed-node2 systemd[35553]: Reload requested from client PID 40481 ('systemctl')... Jan 06 13:46:01 managed-node2 systemd[35553]: Reloading... Jan 06 13:46:01 managed-node2 systemd[35553]: Reloading finished in 62 ms. Jan 06 13:46:01 managed-node2 sudo[40477]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:02 managed-node2 sudo[40663]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-plzhaglaxzzvydhrqwvkrxkuiozjqaec ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189162.0085576-20095-80168828636969/AnsiballZ_command.py' Jan 06 13:46:02 managed-node2 sudo[40663]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-40663) opened. Jan 06 13:46:02 managed-node2 sudo[40663]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:02 managed-node2 systemd[35553]: Started podman-40667.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 77. Jan 06 13:46:02 managed-node2 sudo[40663]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:02 managed-node2 sudo[40846]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tinojgzgqacflyqozllpewrrpkwlrriu ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189162.6205332-20128-177115993325229/AnsiballZ_command.py' Jan 06 13:46:02 managed-node2 sudo[40846]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-40846) opened. Jan 06 13:46:02 managed-node2 sudo[40846]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:02 managed-node2 python3.12[40849]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:02 managed-node2 systemd[35553]: Started podman-40850.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 81. Jan 06 13:46:03 managed-node2 sudo[40846]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:03 managed-node2 sudo[41030]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-keuavmoimcwdwbgwtlvohgnuhozbmrxs ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189163.4660225-20165-190599417036920/AnsiballZ_command.py' Jan 06 13:46:03 managed-node2 sudo[41030]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-41030) opened. Jan 06 13:46:03 managed-node2 sudo[41030]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:03 managed-node2 python3.12[41033]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:03 managed-node2 systemd[35553]: Started podman-41034.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 85. Jan 06 13:46:03 managed-node2 sudo[41030]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:04 managed-node2 sudo[41214]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wbxiyckqxizilrzuwatujicqiainydkt ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189163.9582217-20185-5226058773477/AnsiballZ_command.py' Jan 06 13:46:04 managed-node2 sudo[41214]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-41214) opened. 
Jan 06 13:46:04 managed-node2 sudo[41214]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:04 managed-node2 python3.12[41217]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:04 managed-node2 systemd[35553]: Started podman-41218.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 89. Jan 06 13:46:04 managed-node2 sudo[41214]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:04 managed-node2 sudo[41399]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uajumrtadhbgmyemetrqtefwrckzbstl ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189164.4578116-20201-120176848056859/AnsiballZ_command.py' Jan 06 13:46:04 managed-node2 sudo[41399]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-41399) opened. Jan 06 13:46:04 managed-node2 sudo[41399]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:04 managed-node2 python3.12[41402]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:04 managed-node2 systemd[35553]: Started podman-41403.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 93. Jan 06 13:46:04 managed-node2 sudo[41399]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:05 managed-node2 sudo[41582]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-taasegtnfrnmhaqjhctxhvaqdylbqpap ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189164.9426327-20212-60594231984498/AnsiballZ_command.py' Jan 06 13:46:05 managed-node2 sudo[41582]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-41582) opened. Jan 06 13:46:05 managed-node2 sudo[41582]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:05 managed-node2 python3.12[41585]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:05 managed-node2 systemd[35553]: Started podman-41586.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 97. 
Jan 06 13:46:05 managed-node2 sudo[41582]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:05 managed-node2 sudo[41766]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhhyeqdsjkjbgdkfldqlnkdmcexdccie ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189165.4314206-20222-168457650764953/AnsiballZ_command.py' Jan 06 13:46:05 managed-node2 sudo[41766]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-41766) opened. Jan 06 13:46:05 managed-node2 sudo[41766]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:05 managed-node2 systemd[35553]: Started podman-41770.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 101. Jan 06 13:46:05 managed-node2 sudo[41766]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:06 managed-node2 sudo[41951]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hwsqxtxnrudlstxlsdimgvzaarjozqmh ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189165.972866-20245-83012544825463/AnsiballZ_command.py' Jan 06 13:46:06 managed-node2 sudo[41951]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-41951) opened. Jan 06 13:46:06 managed-node2 sudo[41951]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:06 managed-node2 systemd[35553]: Started podman-41955.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 105. Jan 06 13:46:06 managed-node2 sudo[41951]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:06 managed-node2 sudo[42134]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iiegdbzmzhekkmiknywnmopofsvnzwel ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189166.4645174-20308-64213191785446/AnsiballZ_service_facts.py' Jan 06 13:46:06 managed-node2 sudo[42134]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-42134) opened. 
Jan 06 13:46:06 managed-node2 sudo[42134]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:06 managed-node2 python3.12[42137]: ansible-service_facts Invoked Jan 06 13:46:09 managed-node2 sudo[42134]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:10 managed-node2 python3.12[42375]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:46:10 managed-node2 python3.12[42508]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:11 managed-node2 python3.12[42640]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:12 managed-node2 python3.12[42772]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:46:12 managed-node2 sudo[42947]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zfzwkxwofmhmeimvqpbnuoopinchehfd ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189172.7276819-20590-268043602830232/AnsiballZ_systemd.py' Jan 06 13:46:12 managed-node2 sudo[42947]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-42947) opened. Jan 06 13:46:12 managed-node2 sudo[42947]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:13 managed-node2 python3.12[42950]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 06 13:46:13 managed-node2 systemd[35553]: Reload requested from client PID 42953 ('systemctl')... Jan 06 13:46:13 managed-node2 systemd[35553]: Reloading... Jan 06 13:46:13 managed-node2 systemd[35553]: Reloading finished in 62 ms. 
Jan 06 13:46:13 managed-node2 sudo[42947]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:13 managed-node2 python3.12[43093]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:46:15 managed-node2 python3.12[43357]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:46:15 managed-node2 sudo[43530]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wfevejcjivdytpiuoeknuorkajdsphqz ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189175.3298972-20711-144097727650485/AnsiballZ_systemd.py' Jan 06 13:46:15 managed-node2 sudo[43530]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-43530) opened. Jan 06 13:46:15 managed-node2 sudo[43530]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:15 managed-node2 python3.12[43533]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:46:15 managed-node2 systemd[35553]: Reload requested from client PID 43534 ('systemctl')... Jan 06 13:46:15 managed-node2 systemd[35553]: Reloading... Jan 06 13:46:15 managed-node2 systemd[35553]: Reloading finished in 62 ms. Jan 06 13:46:15 managed-node2 sudo[43530]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:16 managed-node2 sudo[43716]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-llvqzqjfpgdpnudnxxwmxqosudishfsj ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189176.1420093-20751-27531012517819/AnsiballZ_command.py' Jan 06 13:46:16 managed-node2 sudo[43716]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-43716) opened. Jan 06 13:46:16 managed-node2 sudo[43716]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:16 managed-node2 systemd[35553]: Started podman-43720.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 109. Jan 06 13:46:16 managed-node2 sudo[43716]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:17 managed-node2 sudo[43899]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gqhqnzewvksqgttggjqskgskczuojits ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189176.8611283-20789-37014797380012/AnsiballZ_command.py' Jan 06 13:46:17 managed-node2 sudo[43899]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-43899) opened. 
Jan 06 13:46:17 managed-node2 sudo[43899]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:17 managed-node2 python3.12[43902]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:17 managed-node2 systemd[35553]: Started podman-43903.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 113. Jan 06 13:46:17 managed-node2 sudo[43899]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:18 managed-node2 sudo[44082]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cnmxsbwexaawcuwpuykfqeuvwgrxkckm ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189177.8167844-20836-241094605833627/AnsiballZ_command.py' Jan 06 13:46:18 managed-node2 sudo[44082]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-44082) opened. Jan 06 13:46:18 managed-node2 sudo[44082]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:18 managed-node2 python3.12[44085]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:18 managed-node2 systemd[35553]: Started podman-44086.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 117. Jan 06 13:46:18 managed-node2 sudo[44082]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:18 managed-node2 sudo[44266]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-npfyjproyowktzvpykayjyyptskrjgnd ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189178.362549-20864-270347561383766/AnsiballZ_command.py' Jan 06 13:46:18 managed-node2 sudo[44266]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-44266) opened. Jan 06 13:46:18 managed-node2 sudo[44266]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:18 managed-node2 python3.12[44269]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:18 managed-node2 systemd[35553]: Started podman-44270.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 121. 
Jan 06 13:46:18 managed-node2 sudo[44266]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:19 managed-node2 sudo[44450]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdjvnzmkchmlruxtzvngjcvwnpngcbus ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189178.9309702-20895-106637574064302/AnsiballZ_command.py' Jan 06 13:46:19 managed-node2 sudo[44450]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-44450) opened. Jan 06 13:46:19 managed-node2 sudo[44450]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:19 managed-node2 python3.12[44453]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:19 managed-node2 systemd[35553]: Started podman-44454.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 125. Jan 06 13:46:19 managed-node2 sudo[44450]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:19 managed-node2 sudo[44634]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-adjplntiynpjdzyseyewhadmkpvwgpxe ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189179.5147965-20925-89448489317408/AnsiballZ_command.py' Jan 06 13:46:19 managed-node2 sudo[44634]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-44634) opened. Jan 06 13:46:19 managed-node2 sudo[44634]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:19 managed-node2 python3.12[44637]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:19 managed-node2 systemd[35553]: Started podman-44638.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 129. Jan 06 13:46:19 managed-node2 sudo[44634]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:20 managed-node2 sudo[44817]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-budljlxplqkvtfmznlfikdegjpkuvwwo ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189180.0853739-20954-50673196602076/AnsiballZ_command.py' Jan 06 13:46:20 managed-node2 sudo[44817]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-44817) opened. Jan 06 13:46:20 managed-node2 sudo[44817]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:20 managed-node2 systemd[35553]: Started podman-44821.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 133. 
Jan 06 13:46:20 managed-node2 sudo[44817]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:20 managed-node2 sudo[45001]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ctqiudvnlirdroakrwvqdalbfaahlynq ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189180.6239047-20986-274392683824153/AnsiballZ_command.py' Jan 06 13:46:20 managed-node2 sudo[45001]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-45001) opened. Jan 06 13:46:20 managed-node2 sudo[45001]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:21 managed-node2 systemd[35553]: Started podman-45005.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 137. Jan 06 13:46:21 managed-node2 sudo[45001]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:21 managed-node2 sudo[45184]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cpxgndghfodmgqlovbpjsbrcylvdukdv ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189181.1584218-21017-220019652630861/AnsiballZ_service_facts.py' Jan 06 13:46:21 managed-node2 sudo[45184]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-45184) opened. Jan 06 13:46:21 managed-node2 sudo[45184]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:21 managed-node2 python3.12[45187]: ansible-service_facts Invoked Jan 06 13:46:22 managed-node2 sudo[45184]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:24 managed-node2 python3.12[45425]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:46:24 managed-node2 sudo[45600]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fglbmazozocurmcshfjdfffrdccvyucf ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189184.1882167-21161-134697077340025/AnsiballZ_podman_container_info.py' Jan 06 13:46:24 managed-node2 sudo[45600]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-45600) opened. Jan 06 13:46:24 managed-node2 sudo[45600]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:24 managed-node2 python3.12[45603]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jan 06 13:46:24 managed-node2 systemd[35553]: Started podman-45604.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 141. 
Jan 06 13:46:24 managed-node2 sudo[45600]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:25 managed-node2 sudo[45783]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-batlhetyjgksldzuxscilsuandfzuxgq ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189184.8986495-21194-4709292709894/AnsiballZ_command.py' Jan 06 13:46:25 managed-node2 sudo[45783]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-45783) opened. Jan 06 13:46:25 managed-node2 sudo[45783]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:25 managed-node2 python3.12[45786]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:25 managed-node2 systemd[35553]: Started podman-45787.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 145. Jan 06 13:46:25 managed-node2 sudo[45783]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:25 managed-node2 sudo[45966]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mudknwisimdxlobxqaxfgjkenluyujzt ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736189185.4039495-21223-90159224052632/AnsiballZ_command.py' Jan 06 13:46:25 managed-node2 sudo[45966]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-45966) opened. Jan 06 13:46:25 managed-node2 sudo[45966]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jan 06 13:46:25 managed-node2 python3.12[45969]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:25 managed-node2 systemd[35553]: Started podman-45970.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 149. 
Jan 06 13:46:25 managed-node2 sudo[45966]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jan 06 13:46:26 managed-node2 python3.12[46107]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl disable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jan 06 13:46:26 managed-node2 python3.12[46239]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:31 managed-node2 python3.12[46371]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:37 managed-node2 python3.12[46503]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:42 managed-node2 python3.12[46635]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:43 managed-node2 systemd[23127]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 116. Jan 06 13:46:43 managed-node2 systemd[23127]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 116. Jan 06 13:46:43 managed-node2 python3.12[46767]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jan 06 13:46:43 managed-node2 systemd[1]: Stopping systemd-logind.service - User Login Management... ░░ Subject: A stop job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 3044. Jan 06 13:46:43 managed-node2 systemd[1]: systemd-logind.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-logind.service has successfully entered the 'dead' state. Jan 06 13:46:43 managed-node2 systemd[1]: Stopped systemd-logind.service - User Login Management. ░░ Subject: A stop job for unit systemd-logind.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-logind.service has finished. 
░░ ░░ The job identifier is 3044 and the job result is done. Jan 06 13:46:43 managed-node2 python3.12[46917]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:43 managed-node2 systemd[1]: Starting modprobe@drm.service - Load Kernel Module drm... ░░ Subject: A start job for unit modprobe@drm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has begun execution. ░░ ░░ The job identifier is 3124. Jan 06 13:46:43 managed-node2 systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Jan 06 13:46:43 managed-node2 systemd[1]: Finished modprobe@drm.service - Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 3124. Jan 06 13:46:43 managed-node2 systemd[1]: Starting systemd-logind.service - User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 3045. Jan 06 13:46:43 managed-node2 systemd-logind[46921]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Jan 06 13:46:43 managed-node2 systemd-logind[46921]: Watching system buttons on /dev/input/event0 (Power Button) Jan 06 13:46:43 managed-node2 systemd-logind[46921]: Watching system buttons on /dev/input/event1 (Sleep Button) Jan 06 13:46:43 managed-node2 systemd-logind[46921]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Jan 06 13:46:43 managed-node2 systemd[1]: Started systemd-logind.service - User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 3045. Jan 06 13:46:49 managed-node2 python3.12[47052]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:53 managed-node2 systemd[1]: Stopping user@2223.service - User Manager for UID 2223... ░░ Subject: A stop job for unit user@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@2223.service has begun execution. ░░ ░░ The job identifier is 3448. Jan 06 13:46:53 managed-node2 systemd[35553]: Activating special unit exit.target... Jan 06 13:46:53 managed-node2 systemd[1]: Stopping session-4.scope - Session 4 of User root... 
░░ Subject: A stop job for unit session-4.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-4.scope has begun execution. ░░ ░░ The job identifier is 3450. Jan 06 13:46:53 managed-node2 systemd[1]: Stopping session-5.scope - Session 5 of User root... ░░ Subject: A stop job for unit session-5.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-5.scope has begun execution. ░░ ░░ The job identifier is 3451. Jan 06 13:46:53 managed-node2 sshd-session[6642]: error: mm_reap: preauth child terminated by signal 15 Jan 06 13:46:53 managed-node2 sshd-session[4526]: error: mm_reap: preauth child terminated by signal 15 Jan 06 13:46:53 managed-node2 systemd[35553]: Stopping podman-pause-f492329d.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 172. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 170 and the job result is done. Jan 06 13:46:53 managed-node2 sshd-session[4526]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-4526) opened. Jan 06 13:46:53 managed-node2 sshd-session[6642]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-6642) opened. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 160 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 169 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 165 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 166 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 156 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 173 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 157 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopping dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 159. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 162 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped podman-pause-f492329d.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 172 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Removed slice user.slice - Slice /user. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 171 and the job result is done. Jan 06 13:46:53 managed-node2 dbus-broker[36369]: Dispatched 2174 messages @ 3(±16)μs / message. ░░ Subject: Dispatched 2174 messages ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ This message is printed by dbus-broker when shutting down. It includes metric ░░ information collected during the runtime of dbus-broker. ░░ ░░ The message lists the number of dispatched messages ░░ (in this case 2174) as well as the mean time to ░░ handling a single message. The time measurements exclude the time spent on ░░ writing to and reading from the kernel. Jan 06 13:46:53 managed-node2 systemd[35553]: Stopped dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 159 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Removed slice session.slice - User Core Session Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 161 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 158 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Removed slice app.slice - User Application Slice. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 167 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[35553]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 155. Jan 06 13:46:53 managed-node2 systemd[35553]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 154. Jan 06 13:46:53 managed-node2 systemd[35553]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 153. Jan 06 13:46:53 managed-node2 systemd[1]: user@2223.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@2223.service has successfully entered the 'dead' state. Jan 06 13:46:53 managed-node2 systemd[1]: Stopped user@2223.service - User Manager for UID 2223. ░░ Subject: A stop job for unit user@2223.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@2223.service has finished. ░░ ░░ The job identifier is 3448 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[1]: Stopping user-runtime-dir@2223.service - User Runtime Directory /run/user/2223... ░░ Subject: A stop job for unit user-runtime-dir@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@2223.service has begun execution. ░░ ░░ The job identifier is 3447. Jan 06 13:46:53 managed-node2 sshd-session[6642]: pam_systemd(sshd:session): Failed to release session: No session '5' known Jan 06 13:46:53 managed-node2 sshd-session[6642]: pam_unix(sshd:session): session closed for user root Jan 06 13:46:53 managed-node2 sshd-session[4526]: pam_systemd(sshd:session): Failed to release session: No session '4' known Jan 06 13:46:53 managed-node2 sshd-session[4526]: pam_unix(sshd:session): session closed for user root Jan 06 13:46:53 managed-node2 systemd[1]: session-5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-5.scope has successfully entered the 'dead' state. Jan 06 13:46:53 managed-node2 systemd[1]: Stopped session-5.scope - Session 5 of User root. ░░ Subject: A stop job for unit session-5.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-5.scope has finished. ░░ ░░ The job identifier is 3451 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. 
Jan 06 13:46:53 managed-node2 systemd[1]: Stopped session-4.scope - Session 4 of User root. ░░ Subject: A stop job for unit session-4.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit session-4.scope has finished. ░░ ░░ The job identifier is 3450 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[1]: Stopping user@0.service - User Manager for UID 0... ░░ Subject: A stop job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 3452. Jan 06 13:46:53 managed-node2 systemd[4479]: Activating special unit exit.target... Jan 06 13:46:53 managed-node2 systemd[4479]: Removed slice background.slice - User Background Tasks Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 22 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 33 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 32 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 28 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 31 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 30 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 24 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 27 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 26 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 25 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[4479]: Reached target shutdown.target - Shutdown. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 06 13:46:53 managed-node2 systemd[4479]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Jan 06 13:46:53 managed-node2 systemd[4479]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 17. Jan 06 13:46:53 managed-node2 systemd[1]: run-user-2223.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-2223.mount has successfully entered the 'dead' state. Jan 06 13:46:53 managed-node2 systemd[1]: user-runtime-dir@2223.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@2223.service has successfully entered the 'dead' state. Jan 06 13:46:53 managed-node2 systemd[1]: Stopped user-runtime-dir@2223.service - User Runtime Directory /run/user/2223. ░░ Subject: A stop job for unit user-runtime-dir@2223.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@2223.service has finished. ░░ ░░ The job identifier is 3447 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[1]: Removed slice user-2223.slice - User Slice of UID 2223. ░░ Subject: A stop job for unit user-2223.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-2223.slice has finished. ░░ ░░ The job identifier is 3453 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[1]: user@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@0.service has successfully entered the 'dead' state. Jan 06 13:46:53 managed-node2 systemd[1]: Stopped user@0.service - User Manager for UID 0. ░░ Subject: A stop job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@0.service has finished. ░░ ░░ The job identifier is 3452 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[1]: Stopping user-runtime-dir@0.service - User Runtime Directory /run/user/0... 
░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 3449. Jan 06 13:46:53 managed-node2 systemd[1]: run-user-0.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-0.mount has successfully entered the 'dead' state. Jan 06 13:46:53 managed-node2 systemd[1]: user-runtime-dir@0.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state. Jan 06 13:46:53 managed-node2 systemd[1]: Stopped user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A stop job for unit user-runtime-dir@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@0.service has finished. ░░ ░░ The job identifier is 3449 and the job result is done. Jan 06 13:46:53 managed-node2 systemd[1]: Removed slice user-0.slice - User Slice of UID 0. ░░ Subject: A stop job for unit user-0.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-0.slice has finished. ░░ ░░ The job identifier is 3455 and the job result is done. Jan 06 13:46:54 managed-node2 sshd-session[47083]: Accepted publickey for root from 10.31.42.234 port 42066 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Jan 06 13:46:54 managed-node2 sshd-session[47083]: pam_systemd(sshd:session): New sd-bus connection (system-bus-pam-systemd-47083) opened. Jan 06 13:46:54 managed-node2 systemd-logind[46921]: New session 8 of user root. ░░ Subject: A new session 8 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 8 has been created for the user root. ░░ ░░ The leading process of the session is 47083. Jan 06 13:46:54 managed-node2 systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 3458. Jan 06 13:46:54 managed-node2 systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 3457. Jan 06 13:46:54 managed-node2 systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 3457. Jan 06 13:46:54 managed-node2 systemd[1]: Starting user@0.service - User Manager for UID 0... 
░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 3537. Jan 06 13:46:54 managed-node2 systemd-logind[46921]: New session 9 of user root. ░░ Subject: A new session 9 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 9 has been created for the user root. ░░ ░░ The leading process of the session is 47088. Jan 06 13:46:54 managed-node2 (systemd)[47088]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jan 06 13:46:54 managed-node2 systemd[47088]: Queued start job for default target default.target. Jan 06 13:46:54 managed-node2 systemd[47088]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 06 13:46:54 managed-node2 systemd[47088]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jan 06 13:46:54 managed-node2 systemd[47088]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 06 13:46:54 managed-node2 systemd[47088]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 06 13:46:54 managed-node2 systemd[47088]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 06 13:46:54 managed-node2 systemd[47088]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 06 13:46:54 managed-node2 systemd[47088]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 7. Jan 06 13:46:54 managed-node2 systemd[47088]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. 
Jan 06 13:46:54 managed-node2 systemd[47088]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 06 13:46:54 managed-node2 systemd[47088]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jan 06 13:46:54 managed-node2 systemd[47088]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 06 13:46:54 managed-node2 systemd[47088]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 06 13:46:54 managed-node2 systemd[47088]: Startup finished in 132ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 132143 microseconds. Jan 06 13:46:54 managed-node2 systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 3537. Jan 06 13:46:54 managed-node2 systemd[1]: Started session-8.scope - Session 8 of User root. ░░ Subject: A start job for unit session-8.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-8.scope has finished successfully. ░░ ░░ The job identifier is 3618. 
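Note: the repeated `loginctl show-user --value -p State user_quadlet_pod` invocations in the surrounding entries come from the role's "Wait for user session to exit closing state" tasks (see the TASKS RECAP at the end of this log): after `loginctl disable-linger`, the role keeps polling the user's State property until the lingering session has fully gone away before it removes the user. A minimal shell sketch of that polling loop, with a made-up retry count and interval rather than the role's actual settings, could look like this:

    # Hypothetical polling loop; the role does this with an Ansible task using
    # retries/until, not a shell script.
    loginctl disable-linger user_quadlet_pod
    for i in $(seq 1 12); do
        state=$(loginctl show-user --value -p State user_quadlet_pod 2>/dev/null)
        # Once the user manager has shut down, show-user fails or State is no
        # longer "closing"; either way the wait is over.
        [ "$state" != "closing" ] && break
        sleep 5
    done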
Jan 06 13:46:54 managed-node2 sshd-session[47083]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 06 13:46:55 managed-node2 python3.12[47215]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:56 managed-node2 python3.12[47347]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:46:58 managed-node2 python3.12[47609]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:46:59 managed-node2 python3.12[47746]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:46:59 managed-node2 python3.12[47879]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:00 managed-node2 python3.12[48011]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:02 managed-node2 python3.12[48143]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:03 managed-node2 python3.12[48276]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:03 managed-node2 python3.12[48408]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:04 managed-node2 python3.12[48540]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:05 managed-node2 python3.12[48671]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:05 managed-node2 python3.12[48802]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:47:07 managed-node2 python3.12[48933]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False 
get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:07 managed-node2 python3.12[49066]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:08 managed-node2 python3.12[49198]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:09 managed-node2 python3.12[49330]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:09 managed-node2 python3.12[49461]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:10 managed-node2 python3.12[49592]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:47:11 managed-node2 python3.12[49723]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:12 managed-node2 python3.12[49854]: ansible-user Invoked with name=user_quadlet_pod state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 06 13:47:12 managed-node2 userdel[49856]: delete user 'user_quadlet_pod' Jan 06 13:47:12 managed-node2 userdel[49856]: removed group 'user_quadlet_pod' owned by 'user_quadlet_pod' Jan 06 13:47:12 managed-node2 userdel[49856]: removed shadow group 'user_quadlet_pod' owned by 'user_quadlet_pod' Jan 06 13:47:14 managed-node2 python3.12[50118]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:15 managed-node2 python3.12[50256]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 06 13:47:16 managed-node2 python3.12[50388]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:18 managed-node2 python3.12[50521]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1 Jan 06 13:47:20 managed-node2 python3.12[50654]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 06 13:47:20 managed-node2 systemd[1]: Reload requested from client PID 50657 ('systemctl') (unit session-8.scope)... Jan 06 13:47:20 managed-node2 systemd[1]: Reloading... Jan 06 13:47:20 managed-node2 systemd[1]: Reloading finished in 221 ms. Jan 06 13:47:20 managed-node2 systemd[1]: Stopping quadlet-pod-container.service... ░░ Subject: A stop job for unit quadlet-pod-container.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-pod-container.service has begun execution. ░░ ░░ The job identifier is 3700. Jan 06 13:47:30 managed-node2 quadlet-pod-container[50712]: time="2025-01-06T13:47:30-05:00" level=warning msg="StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL" Jan 06 13:47:30 managed-node2 podman[50712]: 2025-01-06 13:47:30.798443605 -0500 EST m=+10.053395039 container died 5b47883bef44d3eb9b4823d123502276bc4b13bb88330fc9ef67e816c6c5db72 (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z) Jan 06 13:47:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay-5a59880878af1b2a087857e0a51a9ac8cd64653e75a8a2ae0272a2452a84d5fb-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-5a59880878af1b2a087857e0a51a9ac8cd64653e75a8a2ae0272a2452a84d5fb-merged.mount has successfully entered the 'dead' state. Jan 06 13:47:30 managed-node2 podman[50712]: 2025-01-06 13:47:30.846290771 -0500 EST m=+10.101242121 container remove 5b47883bef44d3eb9b4823d123502276bc4b13bb88330fc9ef67e816c6c5db72 (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 06 13:47:30 managed-node2 quadlet-pod-container[50712]: 5b47883bef44d3eb9b4823d123502276bc4b13bb88330fc9ef67e816c6c5db72 Jan 06 13:47:30 managed-node2 podman[50712]: 2025-01-06 13:47:30.849091737 -0500 EST m=+10.104043259 pod stop 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 (image=, name=quadlet-pod) Jan 06 13:47:30 managed-node2 systemd[1]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-pod-container.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 137. Jan 06 13:47:30 managed-node2 systemd[1]: libpod-a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524.scope has successfully entered the 'dead' state. 
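Note: the `status=137` reported for quadlet-pod-container.service above is the usual 128+signal encoding of a signal death, 128 + 9 (SIGKILL), which matches the earlier warning that SIGTERM failed to stop the container within 10 seconds and podman fell back to SIGKILL. A small, self-contained illustration of that convention (not taken from this run):

    # Demonstrates the 128+signal exit-status convention seen above.
    sleep 60 &
    kill -KILL $!
    wait $!
    echo $?    # prints 137 (128 + 9 for SIGKILL)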
Jan 06 13:47:30 managed-node2 podman[50712]: 2025-01-06 13:47:30.868447717 -0500 EST m=+10.123399150 container died a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 06 13:47:30 managed-node2 kernel: podman0: port 1(veth2) entered disabled state Jan 06 13:47:30 managed-node2 kernel: veth2 (unregistering): left allmulticast mode Jan 06 13:47:30 managed-node2 kernel: veth2 (unregistering): left promiscuous mode Jan 06 13:47:30 managed-node2 kernel: podman0: port 1(veth2) entered disabled state Jan 06 13:47:30 managed-node2 NetworkManager[778]: [1736189250.9014] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 06 13:47:30 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3702. Jan 06 13:47:30 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3702. Jan 06 13:47:30 managed-node2 systemd[1]: run-netns-netns\x2dbafbbd3e\x2d9e6a\x2de55c\x2d27af\x2dc8baf42222a9.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dbafbbd3e\x2d9e6a\x2de55c\x2d27af\x2dc8baf42222a9.mount has successfully entered the 'dead' state. Jan 06 13:47:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524-userdata-shm.mount has successfully entered the 'dead' state. Jan 06 13:47:31 managed-node2 systemd[1]: var-lib-containers-storage-overlay-d5e5e8fcc647392de5d53202fcf8517f02373c949190e191276e21f67f814a6f-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-d5e5e8fcc647392de5d53202fcf8517f02373c949190e191276e21f67f814a6f-merged.mount has successfully entered the 'dead' state. 
Jan 06 13:47:31 managed-node2 podman[50712]: 2025-01-06 13:47:31.08668422 -0500 EST m=+10.341636339 container cleanup a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 06 13:47:31 managed-node2 systemd[1]: Removed slice machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice - cgroup machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice. ░░ Subject: A stop job for unit machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice has finished. ░░ ░░ The job identifier is 3781 and the job result is done. Jan 06 13:47:31 managed-node2 podman[50712]: 2025-01-06 13:47:31.095681609 -0500 EST m=+10.350632958 pod stop 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 (image=, name=quadlet-pod) Jan 06 13:47:31 managed-node2 systemd[1]: machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice: Failed to open /run/systemd/transient/machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice: No such file or directory Jan 06 13:47:31 managed-node2 podman[50761]: 2025-01-06 13:47:31.148238481 -0500 EST m=+0.037313504 pod stop 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 (image=, name=quadlet-pod) Jan 06 13:47:31 managed-node2 systemd[1]: machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice: Failed to open /run/systemd/transient/machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice: No such file or directory Jan 06 13:47:31 managed-node2 quadlet-pod-pod-pod[50761]: quadlet-pod Jan 06 13:47:31 managed-node2 systemd[1]: quadlet-pod-container.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-container.service has entered the 'failed' state with result 'exit-code'. Jan 06 13:47:31 managed-node2 systemd[1]: Stopped quadlet-pod-container.service. ░░ Subject: A stop job for unit quadlet-pod-container.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-pod-container.service has finished. ░░ ░░ The job identifier is 3700 and the job result is done. 
Jan 06 13:47:31 managed-node2 podman[50779]: 2025-01-06 13:47:31.245583485 -0500 EST m=+0.068442853 container remove a6a3908e6216e9d64eaaf80cf4e238adc1ac5a86557f6d7d29e3bcb37faea524 (image=localhost/podman-pause:5.3.1-1733097600, name=quadlet-pod-infra, pod_id=1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service, io.buildah.version=1.38.0) Jan 06 13:47:31 managed-node2 systemd[1]: machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice: Failed to open /run/systemd/transient/machine-libpod_pod_1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090.slice: No such file or directory Jan 06 13:47:31 managed-node2 podman[50779]: 2025-01-06 13:47:31.258450942 -0500 EST m=+0.081310288 pod remove 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 (image=, name=quadlet-pod) Jan 06 13:47:31 managed-node2 quadlet-pod-pod-pod[50779]: 1f760fc9d855ff708975a51c50533b08336b84a95131aaebdda963d92fd52090 Jan 06 13:47:31 managed-node2 systemd[1]: quadlet-pod-pod-pod.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-pod-pod-pod.service has successfully entered the 'dead' state. Jan 06 13:47:31 managed-node2 python3.12[50922]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:32 managed-node2 python3.12[51186]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-container.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:47:33 managed-node2 python3.12[51317]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:47:33 managed-node2 systemd[1]: Reload requested from client PID 51318 ('systemctl') (unit session-8.scope)... Jan 06 13:47:33 managed-node2 systemd[1]: Reloading... Jan 06 13:47:33 managed-node2 systemd[1]: Reloading finished in 215 ms. 
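Note: the entries above cover the system-scope (rootful) cleanup of the container quadlet: the generated quadlet-pod-container.service unit is stopped, the /etc/containers/systemd/quadlet-pod-container.container file is removed, and systemd is reloaded so the generated unit disappears. Done by hand, and assuming root, the equivalent sequence would roughly be:

    # Manual equivalent of the cleanup steps shown above (sketch only).
    systemctl stop quadlet-pod-container.service
    rm -f /etc/containers/systemd/quadlet-pod-container.container
    systemctl daemon-reload    # re-runs the quadlet generator; the generated unit is gone afterwards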
Jan 06 13:47:34 managed-node2 python3.12[51640]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:35 managed-node2 python3.12[51778]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:35 managed-node2 python3.12[51916]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:35 managed-node2 python3.12[52055]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:36 managed-node2 python3.12[52193]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:37 managed-node2 python3.12[52608]: ansible-service_facts Invoked Jan 06 13:47:40 managed-node2 python3.12[52846]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:40 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 06 13:47:41 managed-node2 python3.12[52980]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 06 13:47:41 managed-node2 systemd[1]: Reload requested from client PID 52983 ('systemctl') (unit session-8.scope)... Jan 06 13:47:41 managed-node2 systemd[1]: Reloading... Jan 06 13:47:42 managed-node2 systemd[1]: Reloading finished in 210 ms. Jan 06 13:47:42 managed-node2 python3.12[53167]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 06 13:47:43 managed-node2 python3.12[53431]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 06 13:47:44 managed-node2 python3.12[53562]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 06 13:47:44 managed-node2 systemd[1]: Reload requested from client PID 53563 ('systemctl') (unit session-8.scope)... Jan 06 13:47:44 managed-node2 systemd[1]: Reloading... 
Jan 06 13:47:44 managed-node2 systemd[1]: Reloading finished in 209 ms. Jan 06 13:47:44 managed-node2 systemd[1]: Starting dnf-makecache.service - dnf makecache... ░░ Subject: A start job for unit dnf-makecache.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.service has begun execution. ░░ ░░ The job identifier is 3782. Jan 06 13:47:44 managed-node2 dnf[53616]: Beaker Client - RedHatEnterpriseLinux9 10 kB/s | 1.5 kB 00:00 Jan 06 13:47:44 managed-node2 dnf[53616]: Beaker harness 19 kB/s | 1.3 kB 00:00 Jan 06 13:47:45 managed-node2 dnf[53616]: Copr repo for beakerlib-libraries owned by bgon 3.8 kB/s | 1.8 kB 00:00 Jan 06 13:47:45 managed-node2 dnf[53616]: CentOS Stream 10 - BaseOS 45 kB/s | 2.3 kB 00:00 Jan 06 13:47:45 managed-node2 dnf[53616]: CentOS Stream 10 - AppStream 57 kB/s | 2.3 kB 00:00 Jan 06 13:47:45 managed-node2 python3.12[53890]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 06 13:47:45 managed-node2 dnf[53616]: CentOS Stream 10 - HighAvailability 30 kB/s | 2.3 kB 00:00 Jan 06 13:47:45 managed-node2 dnf[53616]: Metadata cache created. Jan 06 13:47:45 managed-node2 systemd[1]: dnf-makecache.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dnf-makecache.service has successfully entered the 'dead' state. Jan 06 13:47:45 managed-node2 systemd[1]: Finished dnf-makecache.service - dnf makecache. ░░ Subject: A start job for unit dnf-makecache.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.service has finished successfully. ░░ ░░ The job identifier is 3782. 
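Note: the role's "for testing and debugging" checks run in the surrounding journal entries: it prunes all images and then lists any images, volumes, containers, and networks that survived the cleanup. The commands, exactly as invoked in the log, are:

    # Verification commands seen in the log; headings are suppressed, so any
    # non-empty output means something was left behind.
    podman image prune --all -f
    podman images -n
    podman volume ls -n
    podman ps --noheading
    podman network ls -n -q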
Jan 06 13:47:46 managed-node2 python3.12[54032]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:47:46 managed-node2 python3.12[54171]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:47:46 managed-node2 python3.12[54310]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:47:47 managed-node2 python3.12[54448]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 06 13:47:48 managed-node2 python3.12[54863]: ansible-service_facts Invoked
Jan 06 13:47:51 managed-node2 python3.12[55101]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node2 : ok=386 changed=27 unreachable=0 failed=1 skipped=391 rescued=1 ignored=1

TASKS RECAP ********************************************************************
Monday 06 January 2025 13:47:51 -0500 (0:00:00.426) 0:03:00.433 ********
===============================================================================
fedora.linux_system_roles.podman : Wait for user session to exit closing state -- 16.47s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62
fedora.linux_system_roles.podman : Wait for user session to exit closing state -- 12.48s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:81
fedora.linux_system_roles.podman : Stop and disable service ------------ 11.34s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Stop and disable service ------------ 11.01s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.19s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Ensure container images are present --- 2.40s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.07s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.03s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.01s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Check files ------------------------------------------------------------- 1.78s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40
fedora.linux_system_roles.podman : Ensure container images are present --- 1.75s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Gathering Facts --------------------------------------------------------- 1.50s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
fedora.linux_system_roles.podman : Start service ------------------------ 1.37s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.25s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Reload systemctl --------------------- 1.16s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
fedora.linux_system_roles.podman : Start service ------------------------ 0.97s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.90s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
fedora.linux_system_roles.podman : Stop and disable service ------------- 0.88s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.84s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Stop and disable service ------------- 0.82s
/tmp/collections-IyT/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12