ansible-playbook [core 2.16.13]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-ea9
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.1 (main, Feb 21 2024, 14:18:26) [GCC 8.5.0 20210514 (Red Hat 8.5.0-21)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_pod.yml ************************************************
2 plays in /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:5
Tuesday 05 November 2024 18:18:38 -0500 (0:00:00.008) 0:00:00.008 ******
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-Qwn/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Ensure that the role can manage quadlet pods] ****************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Tuesday 05 November 2024 18:18:38 -0500 (0:00:00.021) 0:00:00.029 ******
ok: [managed-node2]

TASK [Run the role - root] *****************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:34
Tuesday 05 November 2024 18:18:39 -0500 (0:00:01.186) 0:00:01.216 ******

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Tuesday 05 November 2024
18:18:39 -0500 (0:00:00.060) 0:00:01.276 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Tuesday 05 November 2024 18:18:39 -0500 (0:00:00.027) 0:00:01.304 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Tuesday 05 November 2024 18:18:39 -0500 (0:00:00.041) 0:00:01.345 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Tuesday 05 November 2024 18:18:40 -0500 (0:00:00.475) 0:00:01.820 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Tuesday 05 November 2024 18:18:40 -0500 (0:00:00.030) 0:00:01.851 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Tuesday 05 November 2024 18:18:40 -0500 (0:00:00.358) 0:00:02.209 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Tuesday 05 November 2024 18:18:40 -0500 (0:00:00.024) 0:00:02.233 ****** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", 
"changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Tuesday 05 November 2024 18:18:40 -0500 (0:00:00.047) 0:00:02.281 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Tuesday 05 November 2024 18:18:42 -0500 (0:00:01.738) 0:00:04.020 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Tuesday 05 November 2024 18:18:42 -0500 (0:00:00.052) 0:00:04.073 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Tuesday 05 November 2024 18:18:42 -0500 (0:00:00.055) 0:00:04.129 ****** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Tuesday 05 November 2024 18:18:42 -0500 (0:00:00.055) 0:00:04.184 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Tuesday 05 November 2024 18:18:42 -0500 (0:00:00.052) 0:00:04.237 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Tuesday 05 November 2024 18:18:42 -0500 (0:00:00.052) 0:00:04.289 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.030244", "end": "2024-11-05 18:18:43.227516", "rc": 0, "start": "2024-11-05 18:18:43.197272" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.512) 0:00:04.802 ****** ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: 
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.052) 0:00:04.855 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.055) 0:00:04.910 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.086) 0:00:04.997 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.095) 0:00:05.092 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_fail_if_too_old | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.079) 0:00:05.171 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.104) 0:00:05.275 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 05 November 2024 18:18:43 -0500 (0:00:00.076) 0:00:05.352 ****** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 05 November 2024 18:18:44 -0500 (0:00:00.491) 0:00:05.843 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 05 November 2024 18:18:44 -0500 (0:00:00.056) 0:00:05.900 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Tuesday 05 November 2024 18:18:44 -0500 (0:00:00.069) 0:00:05.969 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1730848364.6501715, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1730848334.8832226, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "481438935", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Tuesday 05 November 2024 18:18:44 -0500 (0:00:00.425) 0:00:06.394 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Tuesday 05 November 2024 18:18:44 -0500 (0:00:00.054) 0:00:06.449 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.052) 0:00:06.502 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.039) 0:00:06.541 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.038) 0:00:06.580 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.034) 0:00:06.614 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.039) 0:00:06.653 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.034) 0:00:06.688 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.035) 0:00:06.724 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.087) 0:00:06.812 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.108) 0:00:06.921 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.040) 0:00:06.961 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.036) 0:00:06.998 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.102) 0:00:07.100 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.064) 0:00:07.164 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.052) 0:00:07.217 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.078) 0:00:07.296 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.045) 0:00:07.341 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.045) 0:00:07.386 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Tuesday 05 November 2024 18:18:45 -0500 (0:00:00.071) 0:00:07.458 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: 
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.032) 0:00:07.491 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.033) 0:00:07.524 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.037) 0:00:07.562 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.050) 0:00:07.612 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.054) 0:00:07.667 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.061) 0:00:07.728 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.098) 0:00:07.826 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.047) 0:00:07.874 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.035) 0:00:07.909 ****** skipping: [managed-node2] => { "censored": "the 
output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.046) 0:00:07.955 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.036) 0:00:07.992 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.108) 0:00:08.100 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.045) 0:00:08.146 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.043) 0:00:08.190 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.034) 0:00:08.224 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.054) 0:00:08.279 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.095) 0:00:08.374 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.043) 0:00:08.417 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 05 November 2024 18:18:46 -0500 (0:00:00.049) 0:00:08.467 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.079) 0:00:08.546 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1730848364.6501715, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1730848334.8832226, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "481438935", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.413) 0:00:08.960 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.047) 0:00:09.008 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.102) 0:00:09.110 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.049) 0:00:09.160 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.049) 0:00:09.210 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.049) 0:00:09.259 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.050) 0:00:09.310 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.051) 0:00:09.362 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 05 November 2024 18:18:47 -0500 (0:00:00.048) 0:00:09.410 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.087) 0:00:09.498 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.056) 
0:00:09.554 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.056) 0:00:09.611 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.120) 0:00:09.731 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.061) 0:00:09.793 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.049) 0:00:09.843 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.116) 0:00:09.959 ****** included: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.110) 0:00:10.070 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.066) 0:00:10.137 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.053) 0:00:10.191 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 05 November 2024 18:18:48 -0500 (0:00:00.237) 0:00:10.428 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 05 November 2024 18:18:49 -0500 (0:00:00.067) 0:00:10.495 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 05 November 2024 18:18:49 -0500 (0:00:00.073) 0:00:10.568 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Tuesday 05 November 2024 18:18:49 -0500 (0:00:00.645) 0:00:11.214 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Tuesday 05 November 2024 18:18:49 -0500 (0:00:00.072) 0:00:11.287 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Tuesday 05 November 2024 18:18:49 -0500 (0:00:00.055) 0:00:11.342 ****** changed: [managed-node2] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/etc/containers/systemd/quadlet-pod-pod.pod", "gid": 0, "group": "root", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1730848729.9123697-24295-180984615223804/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Tuesday 05 November 2024 18:18:50 -0500 (0:00:00.969) 0:00:12.312 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: 
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Tuesday 05 November 2024 18:18:51 -0500 (0:00:01.060) 0:00:13.372 ******
fatal: [managed-node2]: FAILED! => {
    "changed": false
}

MSG:

Could not find the requested service quadlet-pod-pod-pod.service: host

TASK [Debug3] ******************************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:127
Tuesday 05 November 2024 18:18:52 -0500 (0:00:00.624) 0:00:13.997 ******
ok: [managed-node2] => {
    "changed": false,
    "cmd": "set -x\nset -o pipefail\nexec 1>&2\n#podman volume rm --all\n#podman network prune -f\npodman volume ls\npodman network ls\npodman secret ls\npodman container ls\npodman pod ls\npodman images\nsystemctl list-units | grep quadlet\nsystemctl list-unit-files | grep quadlet\nls -alrtF /etc/containers/systemd\n/usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log\n",
    "delta": "0:00:00.656841",
    "end": "2024-11-05 18:18:53.481232",
    "rc": 0,
    "start": "2024-11-05 18:18:52.824391"
}

STDERR:

+ set -o pipefail
+ exec
+ podman volume ls
+ podman network ls
NETWORK ID    NAME                         DRIVER
2f259bab93aa  podman                       bridge
a4dcf21f020e  podman-default-kube-network  bridge
+ podman secret ls
ID  NAME  DRIVER  CREATED  UPDATED
+ podman container ls
CONTAINER ID  IMAGE  COMMAND  CREATED  STATUS  PORTS  NAMES
+ podman pod ls
POD ID  NAME  STATUS  CREATED  INFRA ID  # OF CONTAINERS
+ podman images
REPOSITORY                TAG                   IMAGE ID      CREATED        SIZE
localhost/podman-pause    4.9.4-dev-1708535009  44161fa65517  5 minutes ago  769 kB
quay.io/libpod/testimage  20210610              9f9ec7f2fdef  3 years ago    7.99 MB
+ systemctl list-units
+ grep quadlet
+ systemctl list-unit-files
+ grep quadlet
+ ls -alrtF /etc/containers/systemd
total 4
drwxr-xr-x. 8 root root 162 Nov 5 18:18 ../
-rw-r--r--. 1 root root 70 Nov 5 18:18 quadlet-pod-pod.pod
drwxr-xr-x. 2 root root 33 Nov 5 18:18 ./
+ /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log
quadlet-generator[50605]: No files parsed from [/etc/containers/systemd /usr/share/containers/systemd]

TASK [Check AVCs] **************************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:146
Tuesday 05 November 2024 18:18:53 -0500 (0:00:01.101) 0:00:15.099 ******
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "grep",
        "type=AVC",
        "/var/log/audit/audit.log"
    ],
    "delta": "0:00:00.005534",
    "end": "2024-11-05 18:18:53.971775",
    "failed_when_result": false,
    "rc": 1,
    "start": "2024-11-05 18:18:53.966241"
}

MSG:

non-zero return code

TASK [Dump journal] ************************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:151
Tuesday 05 November 2024 18:18:54 -0500 (0:00:00.481) 0:00:15.580 ******
fatal: [managed-node2]: FAILED! => {
    "changed": false,
    "cmd": [
        "journalctl",
        "-ex"
    ],
    "delta": "0:00:00.028495",
    "end": "2024-11-05 18:18:54.493695",
    "failed_when_result": true,
    "rc": 0,
    "start": "2024-11-05 18:18:54.465200"
}

STDOUT:

-- Logs begin at Tue 2024-11-05 18:07:21 EST, end at Tue 2024-11-05 18:18:54 EST.
-- Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com sshd[5113]: Accepted publickey for root from 10.31.11.156 port 42166 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com sshd[5114]: Accepted publickey for root from 10.31.11.156 port 42178 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of user root. -- Subject: Unit session-4.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-4.scope has finished starting up. -- -- The start-up result is done. Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com systemd-logind[606]: New session 4 of user root. -- Subject: A new session 4 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 4 has been created for the user root. -- -- The leading process of the session is 5114. Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com systemd-logind[606]: New session 3 of user root. -- Subject: A new session 3 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 3 has been created for the user root. -- -- The leading process of the session is 5113. Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of user root. -- Subject: Unit session-3.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-3.scope has finished starting up. -- -- The start-up result is done. Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com sshd[5114]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com sshd[5113]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com sshd[5119]: Received disconnect from 10.31.11.156 port 42178:11: disconnected by user Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com sshd[5119]: Disconnected from user root 10.31.11.156 port 42178 Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com sshd[5114]: pam_unix(sshd:session): session closed for user root Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit session-4.scope has successfully entered the 'dead' state. Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com systemd-logind[606]: Session 4 logged out. Waiting for processes to exit. Nov 05 18:09:39 ip-10-31-8-209.us-east-1.aws.redhat.com systemd-logind[606]: Removed session 4. -- Subject: Session 4 has been terminated -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 4 has been terminated. 
Nov 05 18:10:34 ip-10-31-8-209.us-east-1.aws.redhat.com dbus-daemon[608]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.24' (uid=0 pid=6543 comm="hostnamectl set-hostname managed-node2 " label="unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023") Nov 05 18:10:34 ip-10-31-8-209.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... -- Subject: Unit systemd-hostnamed.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has begun starting up. Nov 05 18:10:34 ip-10-31-8-209.us-east-1.aws.redhat.com dbus-daemon[608]: [system] Successfully activated service 'org.freedesktop.hostname1' Nov 05 18:10:34 ip-10-31-8-209.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. -- Subject: Unit systemd-hostnamed.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has finished starting up. -- -- The start-up result is done. Nov 05 18:10:34 managed-node2 systemd-hostnamed[6544]: Changed static host name to 'managed-node2' Nov 05 18:10:34 managed-node2 systemd-hostnamed[6544]: Changed host name to 'managed-node2' Nov 05 18:10:34 managed-node2 NetworkManager[673]: [1730848234.7805] hostname: static hostname changed from "ip-10-31-8-209.us-east-1.aws.redhat.com" to "managed-node2" Nov 05 18:10:34 managed-node2 dbus-daemon[608]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=673 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Nov 05 18:10:34 managed-node2 NetworkManager[673]: [1730848234.7856] policy: set-hostname: set hostname to 'managed-node2' (from system configuration) Nov 05 18:10:34 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Nov 05 18:10:34 managed-node2 dbus-daemon[608]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Nov 05 18:10:34 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Nov 05 18:10:44 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Nov 05 18:11:04 managed-node2 systemd[1]: systemd-hostnamed.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-hostnamed.service has successfully entered the 'dead' state. Nov 05 18:11:21 managed-node2 sshd[7104]: Accepted publickey for root from 10.31.9.229 port 50604 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Nov 05 18:11:21 managed-node2 systemd[1]: Started Session 5 of user root. 
-- Subject: Unit session-5.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-5.scope has finished starting up. -- -- The start-up result is done. Nov 05 18:11:21 managed-node2 systemd-logind[606]: New session 5 of user root. -- Subject: A new session 5 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 5 has been created for the user root. -- -- The leading process of the session is 7104. Nov 05 18:11:21 managed-node2 sshd[7104]: pam_unix(sshd:session): session opened for user root by (uid=0) Nov 05 18:11:22 managed-node2 platform-python[7249]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 05 18:11:23 managed-node2 platform-python[7401]: ansible-tempfile Invoked with state=directory prefix=lsr_ suffix=_podman path=None Nov 05 18:11:23 managed-node2 platform-python[7524]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:11:24 managed-node2 platform-python[7647]: ansible-user Invoked with name=podman_basic_user uid=3001 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Nov 05 18:11:24 managed-node2 useradd[7652]: new group: name=podman_basic_user, GID=3001 Nov 05 18:11:24 managed-node2 useradd[7652]: new user: name=podman_basic_user, UID=3001, GID=3001, home=/home/podman_basic_user, shell=/bin/bash Nov 05 18:11:25 managed-node2 platform-python[7780]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd1 state=directory mode=0755 owner=podman_basic_user recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:11:26 managed-node2 platform-python[7903]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd2 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:11:26 managed-node2 platform-python[8026]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd3 state=directory mode=0755 owner=root recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:11:27 managed-node2 platform-python[8149]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd1/index.txt follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 05 18:11:27 managed-node2 platform-python[8248]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_g8amvhe2_podman/httpd1/index.txt mode=0644 owner=podman_basic_user src=/root/.ansible/tmp/ansible-tmp-1730848286.7627668-8115-228138009144856/source _original_basename=tmplad1_5np follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:11:27 managed-node2 platform-python[8373]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd2/index.txt follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 05 18:11:28 managed-node2 platform-python[8472]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_g8amvhe2_podman/httpd2/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1730848287.42335-8115-164404541568213/source _original_basename=tmpp701t54j follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:11:28 managed-node2 platform-python[8597]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd3/index.txt follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 05 18:11:28 managed-node2 platform-python[8696]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_g8amvhe2_podman/httpd3/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1730848288.1145093-8115-274974049818412/source _original_basename=tmppecs7fz4 follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:11:29 managed-node2 platform-python[8821]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:11:29 managed-node2 platform-python[8944]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:11:31 managed-node2 sudo[9192]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lazhzsjnsutgnvvvsggreuzwbaryvavn ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1730848291.5510366-8226-41060908024907/AnsiballZ_dnf.py' Nov 05 18:11:31 managed-node2 sudo[9192]: pam_unix(sudo:session): session opened for user root by root(uid=0) Nov 05 18:11:32 managed-node2 platform-python[9195]: ansible-ansible.legacy.dnf Invoked with name=['crun', 'podman', 'podman-plugins', 
'shadow-utils-subid'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:11:50 managed-node2 dbus-daemon[608]: [system] Reloaded configuration Nov 05 18:11:51 managed-node2 setsebool[9267]: The virt_use_nfs policy boolean was changed to 1 by root Nov 05 18:11:51 managed-node2 setsebool[9267]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Nov 05 18:12:14 managed-node2 kernel: SELinux: Converting 367 SID table entries... Nov 05 18:12:14 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Nov 05 18:12:14 managed-node2 kernel: SELinux: policy capability open_perms=1 Nov 05 18:12:14 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Nov 05 18:12:14 managed-node2 kernel: SELinux: policy capability always_check_network=0 Nov 05 18:12:14 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Nov 05 18:12:14 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 05 18:12:14 managed-node2 dbus-daemon[608]: [system] Reloaded configuration Nov 05 18:12:14 managed-node2 kernel: fuse: init (API version 7.34) Nov 05 18:12:14 managed-node2 systemd[1]: Mounting FUSE Control File System... -- Subject: Unit sys-fs-fuse-connections.mount has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sys-fs-fuse-connections.mount has begun starting up. Nov 05 18:12:14 managed-node2 systemd[1]: Mounted FUSE Control File System. -- Subject: Unit sys-fs-fuse-connections.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sys-fs-fuse-connections.mount has finished starting up. -- -- The start-up result is done. Nov 05 18:12:15 managed-node2 dbus-daemon[608]: [system] Reloaded configuration Nov 05 18:12:15 managed-node2 dbus-daemon[608]: [system] Reloaded configuration Nov 05 18:12:41 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r5cc894cf27934cd781acbb9b5b13d870.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r5cc894cf27934cd781acbb9b5b13d870.service has finished starting up. -- -- The start-up result is done. Nov 05 18:12:41 managed-node2 systemd[1]: cgroup compatibility translation between legacy and unified hierarchy settings activated. See cgroup-compat debug messages for details. Nov 05 18:12:41 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Nov 05 18:12:41 managed-node2 systemd[1]: Reloading. Nov 05 18:12:42 managed-node2 sudo[9192]: pam_unix(sudo:session): session closed for user root Nov 05 18:12:42 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Nov 05 18:12:42 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Nov 05 18:12:42 managed-node2 systemd[1]: run-r5cc894cf27934cd781acbb9b5b13d870.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r5cc894cf27934cd781acbb9b5b13d870.service has successfully entered the 'dead' state. Nov 05 18:12:43 managed-node2 platform-python[11747]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:12:44 managed-node2 platform-python[11876]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Nov 05 18:12:44 managed-node2 platform-python[12000]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:12:46 managed-node2 platform-python[12125]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:12:47 managed-node2 platform-python[12248]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:12:47 managed-node2 platform-python[12371]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:12:51 managed-node2 platform-python[12495]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 05 18:12:51 managed-node2 platform-python[12622]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 05 18:12:51 managed-node2 systemd[1]: Reloading. Nov 05 18:12:52 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon... -- Subject: Unit firewalld.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has begun starting up. Nov 05 18:12:52 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon. -- Subject: Unit firewalld.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit firewalld.service has finished starting up. -- -- The start-up result is done. 
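Up to this point the journal shows the firewall prerequisite being satisfied before any pod work starts: firewalld is installed through dnf, then enabled and started. A minimal sketch of the equivalent tasks, reconstructed from the module arguments logged above (task names are illustrative, not taken from the role source):

    - name: Install firewalld
      ansible.builtin.dnf:
        name: firewalld
        state: present

    - name: Enable and start firewalld
      ansible.builtin.systemd:
        name: firewalld
        state: started
        enabled: true
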
Nov 05 18:12:52 managed-node2 firewalld[12658]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now. Nov 05 18:12:53 managed-node2 platform-python[12844]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 05 18:12:54 managed-node2 platform-python[12967]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:12:55 managed-node2 platform-python[13090]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:12:55 managed-node2 platform-python[13213]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:12:58 managed-node2 platform-python[13337]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:13:01 managed-node2 dbus-daemon[608]: [system] Reloaded configuration Nov 05 18:13:01 managed-node2 dbus-daemon[608]: [system] Reloaded configuration Nov 05 18:13:01 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r27828b07344848f1840b5e7103b81103.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r27828b07344848f1840b5e7103b81103.service has finished starting up. -- -- The start-up result is done. Nov 05 18:13:01 managed-node2 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Nov 05 18:13:02 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. 
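firewall_lib is the internal module of the fedora.linux_system_roles.firewall role; the arguments logged above (port=['15001-15003/tcp'], permanent=True, runtime=True, state=enabled) map onto the role's public firewall variable. A sketch of how a playbook would request the same opening, assuming the role's documented interface:

    - name: Open the ports published by the test pods
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 15001-15003/tcp
            state: enabled
            permanent: true
            runtime: true

Opening the range both permanent and runtime makes the rule survive a firewalld reload while also taking effect immediately.
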
Nov 05 18:13:02 managed-node2 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Nov 05 18:13:02 managed-node2 systemd[1]: run-r27828b07344848f1840b5e7103b81103.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r27828b07344848f1840b5e7103b81103.service has successfully entered the 'dead' state. Nov 05 18:13:02 managed-node2 platform-python[13943]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 05 18:13:04 managed-node2 platform-python[14095]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Nov 05 18:13:05 managed-node2 kernel: SELinux: Converting 460 SID table entries... Nov 05 18:13:05 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Nov 05 18:13:05 managed-node2 kernel: SELinux: policy capability open_perms=1 Nov 05 18:13:05 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Nov 05 18:13:05 managed-node2 kernel: SELinux: policy capability always_check_network=0 Nov 05 18:13:05 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Nov 05 18:13:05 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 05 18:13:05 managed-node2 dbus-daemon[608]: [system] Reloaded configuration Nov 05 18:13:06 managed-node2 platform-python[14222]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Nov 05 18:13:12 managed-node2 platform-python[14345]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:13:14 managed-node2 platform-python[14470]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:13:14 managed-node2 platform-python[14593]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:13:15 managed-node2 platform-python[14716]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 05 18:13:15 managed-node2 platform-python[14815]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1730848395.0965078-10261-111374756394232/source _original_basename=tmp_zs8cgvp follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:13:16 managed-node2 platform-python[14940]: 
ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Nov 05 18:13:16 managed-node2 kernel: evm: overlay not supported Nov 05 18:13:16 managed-node2 systemd[1]: Created slice machine.slice. -- Subject: Unit machine.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine.slice has finished starting up. -- -- The start-up result is done. Nov 05 18:13:16 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice. -- Subject: Unit machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice has finished starting up. -- -- The start-up result is done. Nov 05 18:13:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay-736cb85319b9e80b3f115d6b632073e3f708d131051d613e3c0cc3ce5a35fcd5-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-736cb85319b9e80b3f115d6b632073e3f708d131051d613e3c0cc3ce5a35fcd5-merged.mount has successfully entered the 'dead' state. Nov 05 18:13:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
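After labeling ports 15001-15003 as http_port_t (the local_seport call above), the test copies nopull.yml into /etc/containers/ansible-kubernetes.d and runs podman_play with state=created, which is why the journal shows the machine-libpod_pod_*.slice cgroup being created but no container start-up. The equivalent task, using only the parameters visible in the logged invocation:

    - name: Create, but do not start, the pod from the kube file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: created

On the command line this corresponds roughly to podman play kube --start=false against the same file.
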
Nov 05 18:13:22 managed-node2 platform-python[15266]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:13:24 managed-node2 platform-python[15395]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:13:27 managed-node2 platform-python[15520]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:13:31 managed-node2 platform-python[15644]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 05 18:13:31 managed-node2 platform-python[15771]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 05 18:13:32 managed-node2 platform-python[15898]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 05 18:13:35 managed-node2 platform-python[16021]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:13:38 managed-node2 platform-python[16145]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:13:41 managed-node2 platform-python[16269]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Nov 05 18:13:43 managed-node2 platform-python[16421]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Nov 05 18:13:44 managed-node2 platform-python[16544]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Nov 05 18:13:49 managed-node2 platform-python[16667]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:13:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Nov 05 18:13:53 managed-node2 platform-python[16930]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:13:54 managed-node2 platform-python[17053]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:13:54 managed-node2 platform-python[17176]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 05 18:13:55 managed-node2 platform-python[17275]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1730848434.465947-12145-280219148361602/source _original_basename=tmpj4okwe2v follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:13:55 managed-node2 platform-python[17400]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Nov 05 18:13:55 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice. -- Subject: Unit machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice has finished starting up. -- -- The start-up result is done. Nov 05 18:13:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Nov 05 18:13:59 managed-node2 platform-python[17687]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:14:01 managed-node2 platform-python[17816]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:14:05 managed-node2 platform-python[17941]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:14:08 managed-node2 platform-python[18065]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 05 18:14:09 managed-node2 platform-python[18192]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 05 18:14:10 managed-node2 platform-python[18319]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 05 18:14:12 managed-node2 platform-python[18442]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:14:16 managed-node2 platform-python[18566]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None 
disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:14:19 managed-node2 platform-python[18690]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 05 18:14:22 managed-node2 platform-python[18842]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Nov 05 18:14:23 managed-node2 platform-python[18965]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Nov 05 18:14:29 managed-node2 platform-python[19088]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:14:30 managed-node2 platform-python[19213]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:14:31 managed-node2 platform-python[19337]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Nov 05 18:14:32 managed-node2 platform-python[19464]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:14:32 managed-node2 platform-python[19589]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Nov 05 18:14:32 managed-node2 platform-python[19589]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Nov 05 18:14:32 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice. -- Subject: Unit machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice has finished shutting down. Nov 05 18:14:32 managed-node2 systemd[1]: machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_9f37c84b4fe95f85b45735f7818a3ef6f59ed5043588390c09e813bf7a1996a9.slice completed and consumed the indicated resources. Nov 05 18:14:32 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Nov 05 18:14:32 managed-node2 platform-python[19727]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:14:33 managed-node2 platform-python[19850]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:14:36 managed-node2 platform-python[20105]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:14:38 managed-node2 platform-python[20234]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:14:41 managed-node2 platform-python[20359]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:14:45 managed-node2 platform-python[20483]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 05 18:14:46 managed-node2 platform-python[20610]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 05 18:14:47 managed-node2 platform-python[20737]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 05 18:14:50 managed-node2 platform-python[20860]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True 
sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:14:54 managed-node2 platform-python[20984]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:14:57 managed-node2 platform-python[21108]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Nov 05 18:14:59 managed-node2 platform-python[21260]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Nov 05 18:15:00 managed-node2 platform-python[21383]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Nov 05 18:15:05 managed-node2 platform-python[21506]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:15:07 managed-node2 platform-python[21631]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:15:08 managed-node2 platform-python[21755]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Nov 05 18:15:08 managed-node2 platform-python[21882]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:15:09 managed-node2 platform-python[22007]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Nov 05 18:15:09 managed-node2 platform-python[22007]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Nov 05 18:15:09 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice. 
-- Subject: Unit machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice has finished shutting down. Nov 05 18:15:09 managed-node2 systemd[1]: machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice: Consumed 0 CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit machine-libpod_pod_3cd0b242e00a565d25ab3542e16a82303af919365ac2ca6ead026022e18c2521.slice completed and consumed the indicated resources. Nov 05 18:15:09 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Nov 05 18:15:10 managed-node2 platform-python[22146]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:15:10 managed-node2 platform-python[22269]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:15:10 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
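The teardown of bogus.yml above follows the same sequence used earlier for nopull.yml: resolve the per-file podman-kube@ unit name with systemd-escape, stop and disable that unit, remove the pod with podman_play state=absent, delete the kube file, and prune unused images. A condensed sketch of those tasks as the journal shows them invoked (the register variable name is hypothetical):

    - name: Resolve the unit name for this kube file
      ansible.builtin.command:
        cmd: systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml
      register: __unit  # hypothetical name, used only in this sketch
      changed_when: false

    - name: Stop and disable the per-file unit
      ansible.builtin.systemd:
        name: "{{ __unit.stdout }}"
        state: stopped
        enabled: false

    - name: Remove the pod defined by the kube file
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: absent

    - name: Delete the kube file
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d/bogus.yml
        state: absent

    - name: Prune unused images
      ansible.builtin.command:
        cmd: podman image prune -f
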
Nov 05 18:15:14 managed-node2 platform-python[22524]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:15:15 managed-node2 platform-python[22653]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:15:18 managed-node2 platform-python[22778]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:15:22 managed-node2 platform-python[22902]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 05 18:15:22 managed-node2 platform-python[23029]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 05 18:15:23 managed-node2 platform-python[23156]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 05 18:15:25 managed-node2 platform-python[23279]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:15:28 managed-node2 platform-python[23403]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Nov 05 18:15:31 managed-node2 platform-python[23527]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Nov 05 18:15:33 managed-node2 platform-python[23679]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Nov 05 18:15:34 managed-node2 platform-python[23802]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Nov 05 18:15:39 managed-node2 platform-python[23925]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Nov 05 18:15:40 managed-node2 platform-python[24049]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:15:40 managed-node2 platform-python[24174]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:15:41 managed-node2 platform-python[24298]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:15:42 managed-node2 platform-python[24422]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 05 18:15:43 managed-node2 platform-python[24546]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Nov 05 18:15:43 managed-node2 systemd[1]: Created slice User Slice of UID 3001. -- Subject: Unit user-3001.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001... -- Subject: Unit user-runtime-dir@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun starting up. Nov 05 18:15:43 managed-node2 systemd[1]: Started User runtime directory /run/user/3001. -- Subject: Unit user-runtime-dir@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[1]: Starting User Manager for UID 3001... -- Subject: Unit user@3001.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun starting up. Nov 05 18:15:43 managed-node2 systemd[24551]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0) Nov 05 18:15:43 managed-node2 systemd[24551]: Starting D-Bus User Message Bus Socket. 
-- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Nov 05 18:15:43 managed-node2 systemd[24551]: Reached target Paths. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[24551]: Started Mark boot as successful after the user session has run 2 minutes. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[24551]: Reached target Timers. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[24551]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[24551]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[24551]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[24551]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:43 managed-node2 systemd[24551]: Startup finished in 28ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 3001 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 28162 microseconds. Nov 05 18:15:43 managed-node2 systemd[1]: Started User Manager for UID 3001. -- Subject: Unit user@3001.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished starting up. -- -- The start-up result is done. 
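Everything from here on runs rootless, so the role first enables lingering for podman_basic_user (the getsubids calls above confirm the user has subordinate ID ranges for rootless user namespaces). Lingering is what triggers the user-runtime-dir@3001.service and user@3001.service start-ups just logged, and it keeps the per-user systemd instance, and any podman units under it, alive without an open login session. The logged command expressed as an idempotent task (creates= makes it a no-op once the linger file exists):

    - name: Enable lingering for the rootless user
      ansible.builtin.command:
        cmd: loginctl enable-linger podman_basic_user
        creates: /var/lib/systemd/linger/podman_basic_user
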
Nov 05 18:15:44 managed-node2 platform-python[24687]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:15:44 managed-node2 platform-python[24810]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:15:45 managed-node2 sudo[24933]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-minjozyxmhtkxuhbzzzthuttzlhyikbp ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848544.9590137-16444-115235928362562/AnsiballZ_podman_image.py' Nov 05 18:15:45 managed-node2 sudo[24933]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Nov 05 18:15:45 managed-node2 systemd[24551]: Started D-Bus User Message Bus. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:45 managed-node2 systemd[24551]: Created slice user.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:45 managed-node2 systemd[24551]: Started podman-24945.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:45 managed-node2 systemd[24551]: Started podman-pause-1bcacabc.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:45 managed-node2 systemd[24551]: Started podman-24961.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Nov 05 18:15:46 managed-node2 systemd[24551]: Started podman-24977.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
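The sudo line above shows the pattern the role uses to drive podman as the rootless user: become podman_basic_user with XDG_RUNTIME_DIR pointed at /run/user/3001 so that podman finds the user's own runtime directory; the podman-*.scope units that follow are the resulting rootless podman processes under the user manager. A sketch of that pattern (the image variable is a placeholder; the journal does not record the image name):

    - name: Pull the test image as the rootless user
      containers.podman.podman_image:
        name: "{{ __test_image }}"  # placeholder; not recorded in the journal
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001
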
Nov 05 18:15:46 managed-node2 sudo[24933]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:15:47 managed-node2 platform-python[25106]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:15:47 managed-node2 platform-python[25229]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:15:48 managed-node2 platform-python[25352]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Nov 05 18:15:48 managed-node2 platform-python[25451]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1730848547.7535005-16572-249696152402714/source _original_basename=tmp92bu_zk_ follow=False checksum=bf5385904808b9919054668e232b814701d22878 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:15:48 managed-node2 sudo[25576]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vshhrcriwkqmmnmbciltezqceeotnfrf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848548.6870902-16623-238731631081587/AnsiballZ_podman_play.py'
Nov 05 18:15:48 managed-node2 sudo[25576]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:15:49 managed-node2 platform-python[25579]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:15:49 managed-node2 systemd[24551]: Started podman-25587.scope.
Nov 05 18:15:49 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6
Nov 05 18:15:49 managed-node2 systemd[24551]: Started rootless-netns-c4896361.scope.
Nov 05 18:15:49 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.
Nov 05 18:15:49 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethd8922e4f: link is not ready
Nov 05 18:15:49 managed-node2 kernel: cni-podman1: port 1(vethd8922e4f) entered blocking state
Nov 05 18:15:49 managed-node2 kernel: cni-podman1: port 1(vethd8922e4f) entered disabled state
Nov 05 18:15:49 managed-node2 kernel: device vethd8922e4f entered promiscuous mode
Nov 05 18:15:49 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Nov 05 18:15:49 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Nov 05 18:15:49 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethd8922e4f: link becomes ready
Nov 05 18:15:49 managed-node2 kernel: cni-podman1: port 1(vethd8922e4f) entered blocking state
Nov 05 18:15:49 managed-node2 kernel: cni-podman1: port 1(vethd8922e4f) entered forwarding state
Nov 05 18:15:49 managed-node2 dnsmasq[25775]: listening on cni-podman1(#3): 10.89.0.1
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: started, version 2.79 cachesize 150
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: using local addresses only for domain dns.podman
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: reading /etc/resolv.conf
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: using local addresses only for domain dns.podman
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: using nameserver 10.0.2.3#53
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: using nameserver 10.29.169.13#53
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: using nameserver 10.29.170.12#53
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: using nameserver 10.2.32.1#53
Nov 05 18:15:49 managed-node2 dnsmasq[25777]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Nov 05 18:15:49 managed-node2 conmon[25790]: conmon 72e9ba87921e9ed1fc79 : failed to write to /proc/self/oom_score_adj: Permission denied
Nov 05 18:15:49 managed-node2 conmon[25791]: conmon 72e9ba87921e9ed1fc79 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach}
Nov 05 18:15:49 managed-node2 conmon[25791]: conmon 72e9ba87921e9ed1fc79 : terminal_ctrl_fd: 14
Nov 05 18:15:49 managed-node2 conmon[25791]: conmon 72e9ba87921e9ed1fc79 : winsz read side: 17, winsz write side: 18
Nov 05 18:15:49 managed-node2 conmon[25791]: conmon 72e9ba87921e9ed1fc79 : container PID: 25801
Nov 05 18:15:49 managed-node2 conmon[25811]: conmon a84144e670d63247ef66 : failed to write to /proc/self/oom_score_adj: Permission denied
Nov 05 18:15:49 managed-node2 conmon[25812]: conmon a84144e670d63247ef66 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}
Nov 05 18:15:49 managed-node2 conmon[25812]: conmon a84144e670d63247ef66 : terminal_ctrl_fd: 13
Nov 05 18:15:49 managed-node2 conmon[25812]: conmon a84144e670d63247ef66 : winsz read side: 16, winsz write side: 17
Nov 05 18:15:49 managed-node2 conmon[25812]: conmon a84144e670d63247ef66 : container PID: 25822
Nov 05 18:15:49 managed-node2 platform-python[25579]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Nov 05 18:15:49 managed-node2 platform-python[25579]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
75626cc357be55ce331eed1c9ae3f597c4e0297e17601b9803895009b5f47f02 Container: a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 Nov 05 18:15:49 managed-node2 platform-python[25579]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2024-11-05T18:15:49-05:00" level=info msg="/bin/podman filtering at log level debug" time="2024-11-05T18:15:49-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-11-05T18:15:49-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2024-11-05T18:15:49-05:00" level=info msg="Using sqlite as database backend" time="2024-11-05T18:15:49-05:00" level=debug msg="Using graph driver overlay" time="2024-11-05T18:15:49-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2024-11-05T18:15:49-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2024-11-05T18:15:49-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2024-11-05T18:15:49-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2024-11-05T18:15:49-05:00" level=debug msg="Using transient store: false" time="2024-11-05T18:15:49-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-11-05T18:15:49-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2024-11-05T18:15:49-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2024-11-05T18:15:49-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2024-11-05T18:15:49-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2024-11-05T18:15:49-05:00" level=debug msg="Initializing event backend file" time="2024-11-05T18:15:49-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2024-11-05T18:15:49-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2024-11-05T18:15:49-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2024-11-05T18:15:49-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2024-11-05T18:15:49-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2024-11-05T18:15:49-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2024-11-05T18:15:49-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2024-11-05T18:15:49-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2024-11-05T18:15:49-05:00" level=info msg="Setting parallel job count to 7" 
time="2024-11-05T18:15:49-05:00" level=debug msg="Successfully loaded 1 networks" time="2024-11-05T18:15:49-05:00" level=debug msg="found free device name cni-podman1" time="2024-11-05T18:15:49-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2024-11-05T18:15:49-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2024-11-05 18:15:49.094227475 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2024-11-05T18:15:49-05:00" level=debug msg="Successfully loaded 2 networks" time="2024-11-05T18:15:49-05:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2024-11-05T18:15:49-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2024-11-05T18:15:49-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="FROM \"scratch\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2024-11-05T18:15:49-05:00" level=debug msg="Check for idmapped mounts support " time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2024-11-05T18:15:49-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/56dee595692dd0e60ccff862046e893395c0e245189cf11a8117dee60b79f89c/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/56dee595692dd0e60ccff862046e893395c0e245189cf11a8117dee60b79f89c/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/56dee595692dd0e60ccff862046e893395c0e245189cf11a8117dee60b79f89c/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c571,c848\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Container ID: ef3ca665b7108fd870c451b25634b9b39be71bf78e6693325aaf68508c38cf03" time="2024-11-05T18:15:49-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2024-11-05T18:15:49-05:00" level=debug msg="COPY 
[]string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil)}" time="2024-11-05T18:15:49-05:00" level=debug msg="added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd" time="2024-11-05T18:15:49-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2024-11-05T18:15:49-05:00" level=debug msg="COMMIT localhost/podman-pause:4.9.4-dev-1708535009" time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2024-11-05T18:15:49-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2024-11-05T18:15:49-05:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" is allowed by policy" time="2024-11-05T18:15:49-05:00" level=debug msg="layer list: [\"56dee595692dd0e60ccff862046e893395c0e245189cf11a8117dee60b79f89c\"]" time="2024-11-05T18:15:49-05:00" level=debug msg="using \"/var/tmp/buildah1177084235\" to hold temporary data" time="2024-11-05T18:15:49-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/56dee595692dd0e60ccff862046e893395c0e245189cf11a8117dee60b79f89c/diff" time="2024-11-05T18:15:49-05:00" level=debug msg="layer \"56dee595692dd0e60ccff862046e893395c0e245189cf11a8117dee60b79f89c\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690" time="2024-11-05T18:15:49-05:00" level=debug msg="OCIv1 config = {\"created\":\"2024-11-05T23:15:49.244898532Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2024-11-05T23:15:49.244340037Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-11-05T23:15:49.248208259Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-11-05T18:15:49-05:00" level=debug msg="OCIv1 manifest = 
{\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\",\"size\":668},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\",\"size\":767488}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2024-11-05T18:15:49-05:00" level=debug msg="Docker v2s2 config = {\"created\":\"2024-11-05T23:15:49.244898532Z\",\"container\":\"ef3ca665b7108fd870c451b25634b9b39be71bf78e6693325aaf68508c38cf03\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2024-11-05T23:15:49.244340037Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2024-11-05T23:15:49.248208259Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2024-11-05T18:15:49-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1342,\"digest\":\"sha256:9aaf7473d760c978e4ac690e189b138bf6589b3c6aaaeea6e4fea379525b45f7\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":767488,\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"}]}" time="2024-11-05T18:15:49-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2024-11-05T18:15:49-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2024-11-05T18:15:49-05:00" level=debug msg=" Using transport \"containers-storage\" policy section " time="2024-11-05T18:15:49-05:00" level=debug msg=" Requirement 0: allowed" time="2024-11-05T18:15:49-05:00" level=debug msg="Overall: allowed" time="2024-11-05T18:15:49-05:00" level=debug msg="start reading config" time="2024-11-05T18:15:49-05:00" level=debug msg="finished reading config" time="2024-11-05T18:15:49-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, 
application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2024-11-05T18:15:49-05:00" level=debug msg="... will first try using the original manifest unmodified" time="2024-11-05T18:15:49-05:00" level=debug msg="Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2024-11-05T18:15:49-05:00" level=debug msg="reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2024-11-05T18:15:49-05:00" level=debug msg="No compression detected" time="2024-11-05T18:15:49-05:00" level=debug msg="Using original blob without modification" time="2024-11-05T18:15:49-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff" time="2024-11-05T18:15:49-05:00" level=debug msg="finished reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2024-11-05T18:15:49-05:00" level=debug msg="No compression detected" time="2024-11-05T18:15:49-05:00" level=debug msg="Compression change for blob sha256:a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a (\"application/vnd.oci.image.config.v1+json\") not supported" time="2024-11-05T18:15:49-05:00" level=debug msg="Using original blob without modification" time="2024-11-05T18:15:49-05:00" level=debug msg="setting image creation date to 2024-11-05 23:15:49.244898532 +0000 UTC" time="2024-11-05T18:15:49-05:00" level=debug msg="created new image ID \"a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\" with metadata \"{}\"" time="2024-11-05T18:15:49-05:00" level=debug msg="added name \"localhost/podman-pause:4.9.4-dev-1708535009\" to image \"a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\"" time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2024-11-05T18:15:49-05:00" level=debug msg="printing final image id \"a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Pod using bridge network mode" time="2024-11-05T18:15:49-05:00" level=debug msg="Got pod cgroup as /libpod_parent/75626cc357be55ce331eed1c9ae3f597c4e0297e17601b9803895009b5f47f02" time="2024-11-05T18:15:49-05:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a)" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Inspecting image a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Inspecting image a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a" time="2024-11-05T18:15:49-05:00" level=debug msg="Inspecting image a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a" time="2024-11-05T18:15:49-05:00" level=debug msg="using systemd mode: false" time="2024-11-05T18:15:49-05:00" level=debug msg="setting container name 75626cc357be-infra" time="2024-11-05T18:15:49-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Allocated lock 1 for container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513" time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\"" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Created container \"72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Container \"72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513/userdata\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Container \"72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513\" has run directory \"/run/user/3001/containers/overlay-containers/72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513/userdata\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-11-05T18:15:49-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:15:49-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:15:49-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:15:49-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:15:49-05:00" level=debug msg="using systemd mode: false" time="2024-11-05T18:15:49-05:00" level=debug msg="adding container to pod httpd1" time="2024-11-05T18:15:49-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2024-11-05T18:15:49-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-11-05T18:15:49-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding mount /proc" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding mount /dev" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding mount /dev/pts" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding mount /sys" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-11-05T18:15:49-05:00" level=debug msg="Allocated lock 2 for container a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0" time="2024-11-05T18:15:49-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Created container \"a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Container \"a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0/userdata\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Container \"a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0\" has run directory \"/run/user/3001/containers/overlay-containers/a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0/userdata\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Strongconnecting node 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513" time="2024-11-05T18:15:49-05:00" level=debug msg="Pushed 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 onto stack" time="2024-11-05T18:15:49-05:00" level=debug msg="Finishing node 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513. Popped 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 off stack" time="2024-11-05T18:15:49-05:00" level=debug msg="Strongconnecting node a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0" time="2024-11-05T18:15:49-05:00" level=debug msg="Pushed a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 onto stack" time="2024-11-05T18:15:49-05:00" level=debug msg="Finishing node a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0. 
Popped a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 off stack" time="2024-11-05T18:15:49-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/R7LR5EONP34NQIIP62KJBDNR74,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/6c9e547073a5398784648d0814a8cd8e5a9443ae84562efc8c0dff37ad0ad51e/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/6c9e547073a5398784648d0814a8cd8e5a9443ae84562efc8c0dff37ad0ad51e/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c401,c491\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Mounted container \"72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/6c9e547073a5398784648d0814a8cd8e5a9443ae84562efc8c0dff37ad0ad51e/merged\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Created root filesystem for container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 at /home/podman_basic_user/.local/share/containers/storage/overlay/6c9e547073a5398784648d0814a8cd8e5a9443ae84562efc8c0dff37ad0ad51e/merged" time="2024-11-05T18:15:49-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-d4bf7674-24ab-01f0-164f-98af9d1e582c for container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513" time="2024-11-05T18:15:49-05:00" level=debug msg="creating rootless network namespace with name \"rootless-netns-d22c9f230d0691b8f418\"" time="2024-11-05T18:15:49-05:00" level=debug msg="slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0" time="2024-11-05T18:15:49-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" time="2024-11-05T18:15:49-05:00" level=debug msg="cni result for container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:c2:fc:01:6c:e7:2a Sandbox:} {Name:vethd8922e4f Mac:2a:28:ce:70:c7:e0 Sandbox:} {Name:eth0 Mac:6a:b3:ad:45:c0:0b Sandbox:/run/user/3001/netns/netns-d4bf7674-24ab-01f0-164f-98af9d1e582c}] [{Version:4 Interface:0xc000b78328 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2024-11-05T18:15:49-05:00" level=debug msg="rootlessport: time=\"2024-11-05T18:15:49-05:00\" level=info msg=\"Starting parent driver\"\n" time="2024-11-05T18:15:49-05:00" level=debug msg="rootlessport: time=\"2024-11-05T18:15:49-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport1377165465/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport1377165465/.bp.sock]\"\n" time="2024-11-05T18:15:49-05:00" level=debug msg="rootlessport: time=\"2024-11-05T18:15:49-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2024-11-05T18:15:49-05:00" level=debug msg="rootlessport: time=\"2024-11-05T18:15:49-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2024-11-05T18:15:49-05:00" level=debug msg="rootlessport: time=\"2024-11-05T18:15:49-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\ntime=\"2024-11-05T18:15:49-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 
tcp}]\"\n" time="2024-11-05T18:15:49-05:00" level=debug msg="rootlessport: time=\"2024-11-05T18:15:49-05:00\" level=info msg=Ready\n" time="2024-11-05T18:15:49-05:00" level=debug msg="rootlessport is ready" time="2024-11-05T18:15:49-05:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2024-11-05T18:15:49-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-11-05T18:15:49-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/6c9e547073a5398784648d0814a8cd8e5a9443ae84562efc8c0dff37ad0ad51e/merged\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Created OCI spec for container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513/userdata/config.json" time="2024-11-05T18:15:49-05:00" level=debug msg="Got pod cgroup as " time="2024-11-05T18:15:49-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-11-05T18:15:49-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 -u 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513/userdata -p /run/user/3001/containers/overlay-containers/72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513/userdata/pidfile -n 75626cc357be-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513]" time="2024-11-05T18:15:49-05:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for blkio: mkdir /sys/fs/cgroup/blkio/libpod_parent: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-11-05T18:15:49-05:00" level=debug msg="Received: 25801" time="2024-11-05T18:15:49-05:00" 
level=info msg="Got Conmon PID as 25791" time="2024-11-05T18:15:49-05:00" level=debug msg="Created container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 in OCI runtime" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-11-05T18:15:49-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-11-05T18:15:49-05:00" level=debug msg="Starting container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513 with command [/catatonit -P]" time="2024-11-05T18:15:49-05:00" level=debug msg="Started container 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513" time="2024-11-05T18:15:49-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/RU56B4T46HYJR7HVIE5QANEX57,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/bd35d8658d5eb7c7b9aa23f16ab03ea3c3df6aeab254d9ab036dc92c236fe3c2/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/bd35d8658d5eb7c7b9aa23f16ab03ea3c3df6aeab254d9ab036dc92c236fe3c2/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c401,c491\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Mounted container \"a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/bd35d8658d5eb7c7b9aa23f16ab03ea3c3df6aeab254d9ab036dc92c236fe3c2/merged\"" time="2024-11-05T18:15:49-05:00" level=debug msg="Created root filesystem for container a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 at /home/podman_basic_user/.local/share/containers/storage/overlay/bd35d8658d5eb7c7b9aa23f16ab03ea3c3df6aeab254d9ab036dc92c236fe3c2/merged" time="2024-11-05T18:15:49-05:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2024-11-05T18:15:49-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-11-05T18:15:49-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-11-05T18:15:49-05:00" level=debug msg="Created OCI spec for container a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0/userdata/config.json" time="2024-11-05T18:15:49-05:00" level=debug msg="Got pod cgroup as " time="2024-11-05T18:15:49-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-11-05T18:15:49-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 -u a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0/userdata -p /run/user/3001/containers/overlay-containers/a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0/userdata/ctr.log --log-level debug --syslog --conmon-pidfile 
/run/user/3001/containers/overlay-containers/a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0]" time="2024-11-05T18:15:49-05:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for pids: mkdir /sys/fs/cgroup/pids/conmon: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2024-11-05T18:15:49-05:00" level=debug msg="Received: 25822" time="2024-11-05T18:15:49-05:00" level=info msg="Got Conmon PID as 25812" time="2024-11-05T18:15:49-05:00" level=debug msg="Created container a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 in OCI runtime" time="2024-11-05T18:15:49-05:00" level=debug msg="Starting container a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0 with command [/bin/busybox-extras httpd -f -p 80]" time="2024-11-05T18:15:49-05:00" level=debug msg="Started container a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0" time="2024-11-05T18:15:49-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2024-11-05T18:15:49-05:00" level=debug msg="Shutting down engines" Nov 05 18:15:49 managed-node2 platform-python[25579]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Nov 05 18:15:49 managed-node2 sudo[25576]: pam_unix(sudo:session): session closed for user podman_basic_user Nov 05 18:15:50 managed-node2 sudo[25953]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dzvjlurslxxesexwzwuwnxkfzqygiljz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848550.0766416-16687-193932041339673/AnsiballZ_systemd.py' Nov 05 18:15:50 managed-node2 sudo[25953]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Nov 05 18:15:50 managed-node2 platform-python[25956]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Nov 05 18:15:50 managed-node2 systemd[24551]: Reloading. 
Nov 05 18:15:50 managed-node2 sudo[25953]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:15:50 managed-node2 dnsmasq[25777]: listening on cni-podman1(#3): fe80::c0fc:1ff:fe6c:e72a%cni-podman1
Nov 05 18:15:50 managed-node2 sudo[26090]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-coumeyfjmlracionbrsbylvyvpsiwwzx ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848550.817274-16725-8553955628814/AnsiballZ_systemd.py'
Nov 05 18:15:50 managed-node2 sudo[26090]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:15:51 managed-node2 platform-python[26093]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None
Nov 05 18:15:51 managed-node2 systemd[24551]: Reloading.
Nov 05 18:15:51 managed-node2 sudo[26090]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:15:51 managed-node2 sudo[26229]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dmrrkrgmanhxsaeyqryofdumwgqurepu ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848551.533601-16759-98240339959146/AnsiballZ_systemd.py'
Nov 05 18:15:51 managed-node2 sudo[26229]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:15:52 managed-node2 platform-python[26232]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Nov 05 18:15:52 managed-node2 systemd[24551]: Created slice podman\x2dkube.slice.
Nov 05 18:15:52 managed-node2 systemd[24551]: Starting A template for running K8s workloads via podman-kube-play...
Nov 05 18:15:52 managed-node2 conmon[25791]: conmon 72e9ba87921e9ed1fc79 : container 25801 exited with status 137 Nov 05 18:15:52 managed-node2 conmon[25812]: conmon a84144e670d63247ef66 : container 25822 exited with status 137 Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513)" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=info msg="Using sqlite as database backend" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using graph driver overlay" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using run root /run/user/3001/containers" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using transient store: false" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that overlay is supported" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that overlay is supported" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that native-diff is usable" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Initializing event backend file" Nov 05 18:15:52 managed-node2 
/usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=info msg="Setting parallel job count to 7"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0)"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Setting custom database backend: \"sqlite\""
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=info msg="Using sqlite as database backend"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using graph driver overlay"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using run root /run/user/3001/containers"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using transient store: false"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that overlay is supported"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that overlay is supported"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that metacopy is not being used"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Cached value indicated that native-diff is usable"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Initializing event backend file"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=info msg="Setting parallel job count to 7"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0)"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26265]: time="2024-11-05T18:15:52-05:00" level=debug msg="Shutting down engines"
Nov 05 18:15:52 managed-node2 kernel: cni-podman1: port 1(vethd8922e4f) entered disabled state
Nov 05 18:15:52 managed-node2 kernel: device vethd8922e4f left promiscuous mode
Nov 05 18:15:52 managed-node2 kernel: cni-podman1: port 1(vethd8922e4f) entered disabled state
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 72e9ba87921e9ed1fc793a857283fc786c54438ad1ad6856da13a8118b5c2513)"
Nov 05 18:15:52 managed-node2 /usr/bin/podman[26253]: time="2024-11-05T18:15:52-05:00" level=debug msg="Shutting down engines"
Nov 05 18:15:52 managed-node2 podman[26238]: Pods stopped:
Nov 05 18:15:52 managed-node2 podman[26238]: 75626cc357be55ce331eed1c9ae3f597c4e0297e17601b9803895009b5f47f02
Nov 05 18:15:52 managed-node2 podman[26238]: Pods removed:
Nov 05 18:15:52 managed-node2 podman[26238]: 75626cc357be55ce331eed1c9ae3f597c4e0297e17601b9803895009b5f47f02
Nov 05 18:15:52 managed-node2 podman[26238]: Secrets removed:
Nov 05 18:15:52 managed-node2 podman[26238]: Volumes removed:
Nov 05 18:15:52 managed-node2 systemd[24551]: Started rootless-netns-e316e8d7.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
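The cleanup.PersistentPreRunE/PersistentPostRunE pairs above are conmon's exit-command firing: when a container exits, conmon re-invokes podman with the same global flags plus "container cleanup <id>" so that the container's storage is unmounted and its network is torn down. A condensed sketch of that rootless invocation, reconstructed from the argv logged above (flags elided with "..." are unchanged from the log):

    /usr/bin/podman \
        --root /home/podman_basic_user/.local/share/containers/storage \
        --runroot /run/user/3001/containers \
        --log-level debug --cgroup-manager cgroupfs \
        ... \
        container cleanup a84144e670d63247ef66f042342cf4acb07249e9a3ec2cf39205096089cc33c0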
Nov 05 18:15:52 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth26c773e1: link is not ready
Nov 05 18:15:52 managed-node2 kernel: cni-podman1: port 1(veth26c773e1) entered blocking state
Nov 05 18:15:52 managed-node2 kernel: cni-podman1: port 1(veth26c773e1) entered disabled state
Nov 05 18:15:52 managed-node2 kernel: device veth26c773e1 entered promiscuous mode
Nov 05 18:15:52 managed-node2 kernel: cni-podman1: port 1(veth26c773e1) entered blocking state
Nov 05 18:15:52 managed-node2 kernel: cni-podman1: port 1(veth26c773e1) entered forwarding state
Nov 05 18:15:52 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth26c773e1: link becomes ready
Nov 05 18:15:52 managed-node2 dnsmasq[26487]: listening on cni-podman1(#3): 10.89.0.1
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: started, version 2.79 cachesize 150
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: using local addresses only for domain dns.podman
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: reading /etc/resolv.conf
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: using local addresses only for domain dns.podman
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: using nameserver 10.0.2.3#53
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: using nameserver 10.29.169.13#53
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: using nameserver 10.29.170.12#53
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: using nameserver 10.2.32.1#53
Nov 05 18:15:52 managed-node2 dnsmasq[26489]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Nov 05 18:15:53 managed-node2 podman[26238]: Pod:
Nov 05 18:15:53 managed-node2 podman[26238]: 95db8d3a8cec46d84288f877334e5b6c99afafbed8df07230dc29b7473bd0f2b
Nov 05 18:15:53 managed-node2 podman[26238]: Container:
Nov 05 18:15:53 managed-node2 podman[26238]: c1f54e5715365be4e03a675abaa963a8a09fd07cc15a8972e44766ac995409fc
Nov 05 18:15:53 managed-node2 systemd[24551]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
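podman[26238] here is the user's podman-kube@.service replaying the kube YAML: it stops and removes the previous pod (the "Pods stopped:"/"Pods removed:" lines above) and prints the new pod and container IDs, while the dnsmasq instances belong to the CNI dnsname plugin serving the dns.podman domain for the pod network. A hedged way to verify the result from the host is to run podman as that user (a sketch; the user's exact unit name is not shown in this excerpt):

    su - podman_basic_user -c 'podman pod ps'
    su - podman_basic_user -c 'podman ps --pod --all'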
Nov 05 18:15:53 managed-node2 sudo[26229]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:15:53 managed-node2 platform-python[26665]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Nov 05 18:15:54 managed-node2 dnsmasq[26489]: listening on cni-podman1(#3): fe80::303d:f3ff:feeb:2c6a%cni-podman1
Nov 05 18:15:54 managed-node2 platform-python[26789]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:15:56 managed-node2 platform-python[26914]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:15:57 managed-node2 platform-python[27038]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:15:58 managed-node2 platform-python[27161]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:15:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Nov 05 18:15:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Nov 05 18:15:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
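The systemd-escape call above is how the role derives the instance name for the podman-kube@.service template: each "/" in the kube YAML path becomes "-" and any literal "-" is escaped as "\x2d". Run by hand it produces exactly the unit name that appears later in this log:

    $ systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
    podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service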
Nov 05 18:16:00 managed-node2 platform-python[27450]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:16:00 managed-node2 platform-python[27573]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:01 managed-node2 platform-python[27696]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Nov 05 18:16:01 managed-node2 platform-python[27795]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1730848561.1574502-17168-24462038387786/source _original_basename=tmpzoc6kn_x follow=False checksum=0324b23b0f5379e8ac684ffe787351a3edcfd2b8 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:02 managed-node2 platform-python[27920]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:16:02 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice.
-- Subject: Unit machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:02 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethcc1f0013: link is not ready
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7190] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7200] manager: (vethcc1f0013): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Nov 05 18:16:02 managed-node2 kernel: cni-podman1: port 1(vethcc1f0013) entered blocking state
Nov 05 18:16:02 managed-node2 kernel: cni-podman1: port 1(vethcc1f0013) entered disabled state
Nov 05 18:16:02 managed-node2 kernel: device vethcc1f0013 entered promiscuous mode
Nov 05 18:16:02 managed-node2 systemd-udevd[27969]: Using default interface naming scheme 'rhel-8.0'.
Nov 05 18:16:02 managed-node2 systemd-udevd[27969]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 05 18:16:02 managed-node2 systemd-udevd[27969]: Could not generate persistent MAC address for cni-podman1: No such file or directory
Nov 05 18:16:02 managed-node2 systemd-udevd[27970]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 05 18:16:02 managed-node2 systemd-udevd[27970]: Could not generate persistent MAC address for vethcc1f0013: No such file or directory
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7445] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 dbus-daemon[608]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=673 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7449] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7456] device (cni-podman1): Activation: starting connection 'cni-podman1' (4f126b22-1a5f-4a97-94f2-34058e6af6b5)
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7458] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7460] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7462] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7464] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has begun starting up.
Nov 05 18:16:02 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Nov 05 18:16:02 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Nov 05 18:16:02 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethcc1f0013: link becomes ready
Nov 05 18:16:02 managed-node2 kernel: cni-podman1: port 1(vethcc1f0013) entered blocking state
Nov 05 18:16:02 managed-node2 kernel: cni-podman1: port 1(vethcc1f0013) entered forwarding state
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7837] device (vethcc1f0013): carrier: link connected
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.7840] device (cni-podman1): carrier: link connected
Nov 05 18:16:02 managed-node2 dbus-daemon[608]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Nov 05 18:16:02 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
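NetworkManager is not managing the CNI bridge here: it merely assumes the externally created cni-podman1 and vethcc1f0013 devices (note sys-iface-state: 'external') and walks them through its state machine without configuring them, and the systemd-udevd "Could not generate persistent MAC address" warnings are the normal case for virtual interfaces. A hypothetical spot-check that the bridge is visible but externally managed:

    nmcli -f DEVICE,TYPE,STATE,CONNECTION device status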
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.8163] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.8165] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')
Nov 05 18:16:02 managed-node2 NetworkManager[673]: [1730848562.8169] device (cni-podman1): Activation: successful, device activated.
Nov 05 18:16:02 managed-node2 dnsmasq[28090]: listening on cni-podman1(#3): 10.89.0.1
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: started, version 2.79 cachesize 150
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: using local addresses only for domain dns.podman
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: reading /etc/resolv.conf
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: using local addresses only for domain dns.podman
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: using nameserver 10.29.169.13#53
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: using nameserver 10.29.170.12#53
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: using nameserver 10.2.32.1#53
Nov 05 18:16:02 managed-node2 dnsmasq[28094]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Nov 05 18:16:02 managed-node2 systemd[1]: Started libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope.
-- Subject: Unit libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:02 managed-node2 conmon[28096]: conmon 7d9f02efd188f8ae313d : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}
Nov 05 18:16:02 managed-node2 conmon[28096]: conmon 7d9f02efd188f8ae313d : terminal_ctrl_fd: 13
Nov 05 18:16:02 managed-node2 conmon[28096]: conmon 7d9f02efd188f8ae313d : winsz read side: 17, winsz write side: 18
Nov 05 18:16:02 managed-node2 systemd[1]: Started libcontainer container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.
-- Subject: Unit libpod-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:03 managed-node2 conmon[28096]: conmon 7d9f02efd188f8ae313d : container PID: 28105
Nov 05 18:16:03 managed-node2 systemd[1]: Started libpod-conmon-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope.
-- Subject: Unit libpod-conmon-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope has finished starting up.
--
-- The start-up result is done.
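The unit layout is one libpod-conmon-<id>.scope (the conmon monitor) plus one libpod-<id>.scope (the container's own cgroup) per container, both parented under the machine-libpod_pod_<id>.slice created above. Two hedged ways to view that hierarchy after the fact (assuming the pod name httpd2 and that podman pod inspect exposes CgroupPath, as podman 4.x does):

    systemd-cgls machine.slice
    podman pod inspect httpd2 --format '{{.CgroupPath}}'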
Nov 05 18:16:03 managed-node2 conmon[28117]: conmon 1032dc69fd1779256abc : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}
Nov 05 18:16:03 managed-node2 conmon[28117]: conmon 1032dc69fd1779256abc : terminal_ctrl_fd: 12
Nov 05 18:16:03 managed-node2 conmon[28117]: conmon 1032dc69fd1779256abc : winsz read side: 16, winsz write side: 17
Nov 05 18:16:03 managed-node2 systemd[1]: Started libcontainer container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.
-- Subject: Unit libpod-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:03 managed-node2 conmon[28117]: conmon 1032dc69fd1779256abc : container PID: 28127
Nov 05 18:16:03 managed-node2 platform-python[27920]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
Nov 05 18:16:03 managed-node2 platform-python[27920]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3 Container: 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd
Nov 05 18:16:03 managed-node2 platform-python[27920]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
time="2024-11-05T18:16:02-05:00" level=info msg="/usr/bin/podman filtering at log level debug"
time="2024-11-05T18:16:02-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
time="2024-11-05T18:16:02-05:00" level=info msg="Using sqlite as database backend"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using graph driver overlay"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using graph root /var/lib/containers/storage"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using run root /run/containers/storage"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using tmp dir /run/libpod"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using transient store: false"
time="2024-11-05T18:16:02-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
time="2024-11-05T18:16:02-05:00" level=debug msg="Cached value indicated that overlay is supported"
time="2024-11-05T18:16:02-05:00" level=debug msg="Cached value indicated that overlay is supported"
time="2024-11-05T18:16:02-05:00" level=debug msg="Cached value indicated that metacopy is being used"
time="2024-11-05T18:16:02-05:00" level=debug msg="Cached value indicated that native-diff is not being used"
time="2024-11-05T18:16:02-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
time="2024-11-05T18:16:02-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
time="2024-11-05T18:16:02-05:00" level=debug msg="Initializing event backend file"
time="2024-11-05T18:16:02-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
time="2024-11-05T18:16:02-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
time="2024-11-05T18:16:02-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
time="2024-11-05T18:16:02-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
time="2024-11-05T18:16:02-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
time="2024-11-05T18:16:02-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
time="2024-11-05T18:16:02-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
time="2024-11-05T18:16:02-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
time="2024-11-05T18:16:02-05:00" level=info msg="Setting parallel job count to 7"
time="2024-11-05T18:16:02-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2024-11-05 18:13:16.4951986 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
time="2024-11-05T18:16:02-05:00" level=debug msg="Successfully loaded 2 networks"
time="2024-11-05T18:16:02-05:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2024-11-05T18:16:02-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2024-11-05T18:16:02-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065)" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Pod using bridge network mode" time="2024-11-05T18:16:02-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice for parent machine.slice and name libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3" time="2024-11-05T18:16:02-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice" time="2024-11-05T18:16:02-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice" time="2024-11-05T18:16:02-05:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:16:02-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065)" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Inspecting image 44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Inspecting image 44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065" time="2024-11-05T18:16:02-05:00" level=debug msg="Inspecting image 44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065" time="2024-11-05T18:16:02-05:00" level=debug msg="using systemd mode: false" time="2024-11-05T18:16:02-05:00" level=debug msg="setting container name ebe2b5a6eec8-infra" time="2024-11-05T18:16:02-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Allocated lock 1 for container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08" time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\"" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2024-11-05T18:16:02-05:00" level=debug msg="Check for idmapped mounts support " time="2024-11-05T18:16:02-05:00" level=debug msg="Created container \"7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Container \"7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08\" has work directory \"/var/lib/containers/storage/overlay-containers/7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08/userdata\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Container \"7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08\" has run directory \"/run/containers/storage/overlay-containers/7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08/userdata\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:16:02-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" 
..." time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2024-11-05T18:16:02-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:16:02-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:16:02-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2024-11-05T18:16:02-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2024-11-05T18:16:02-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:16:02-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2024-11-05T18:16:02-05:00" level=debug msg="using systemd mode: false" time="2024-11-05T18:16:02-05:00" level=debug msg="adding container to pod httpd2" time="2024-11-05T18:16:02-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2024-11-05T18:16:02-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2024-11-05T18:16:02-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2024-11-05T18:16:02-05:00" level=debug msg="Adding mount /proc" time="2024-11-05T18:16:02-05:00" level=debug msg="Adding mount /dev" time="2024-11-05T18:16:02-05:00" level=debug msg="Adding mount /dev/pts" time="2024-11-05T18:16:02-05:00" level=debug msg="Adding mount /dev/mqueue" time="2024-11-05T18:16:02-05:00" level=debug msg="Adding mount /sys" time="2024-11-05T18:16:02-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2024-11-05T18:16:02-05:00" level=debug msg="Allocated lock 2 for container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd" time="2024-11-05T18:16:02-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Created container \"1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Container \"1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd\" has work directory \"/var/lib/containers/storage/overlay-containers/1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd/userdata\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Container \"1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd\" has run directory \"/run/containers/storage/overlay-containers/1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd/userdata\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Strongconnecting node 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08" time="2024-11-05T18:16:02-05:00" level=debug msg="Pushed 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 onto stack" time="2024-11-05T18:16:02-05:00" level=debug msg="Finishing node 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08. Popped 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 off stack" time="2024-11-05T18:16:02-05:00" level=debug msg="Strongconnecting node 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd" time="2024-11-05T18:16:02-05:00" level=debug msg="Pushed 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd onto stack" time="2024-11-05T18:16:02-05:00" level=debug msg="Finishing node 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd. 
Popped 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd off stack" time="2024-11-05T18:16:02-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/QOFQOVLPL5IHMNZEYMCRV5DXH4,upperdir=/var/lib/containers/storage/overlay/6220e31cf6858fe37ac0f4a1c1641478d98df30ac2bffc92b38afd71425f14a9/diff,workdir=/var/lib/containers/storage/overlay/6220e31cf6858fe37ac0f4a1c1641478d98df30ac2bffc92b38afd71425f14a9/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c460,c537\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Mounted container \"7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08\" at \"/var/lib/containers/storage/overlay/6220e31cf6858fe37ac0f4a1c1641478d98df30ac2bffc92b38afd71425f14a9/merged\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Created root filesystem for container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 at /var/lib/containers/storage/overlay/6220e31cf6858fe37ac0f4a1c1641478d98df30ac2bffc92b38afd71425f14a9/merged" time="2024-11-05T18:16:02-05:00" level=debug msg="Made network namespace at /run/netns/netns-aec62810-fb9d-b083-b280-c948d45ba6ec for container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08" time="2024-11-05T18:16:02-05:00" level=debug msg="cni result for container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:de:f7:58:79:c6:1e Sandbox:} {Name:vethcc1f0013 Mac:8e:53:b9:e9:a3:68 Sandbox:} {Name:eth0 Mac:ae:78:54:a0:49:36 Sandbox:/run/netns/netns-aec62810-fb9d-b083-b280-c948d45ba6ec}] [{Version:4 Interface:0xc00065a888 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2024-11-05T18:16:02-05:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2024-11-05T18:16:02-05:00" level=debug msg="Setting Cgroups for container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 to machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice:libpod:7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08" time="2024-11-05T18:16:02-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-11-05T18:16:02-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/6220e31cf6858fe37ac0f4a1c1641478d98df30ac2bffc92b38afd71425f14a9/merged\"" time="2024-11-05T18:16:02-05:00" level=debug msg="Created OCI spec for container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 at /var/lib/containers/storage/overlay-containers/7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08/userdata/config.json" time="2024-11-05T18:16:02-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice for parent machine.slice and name libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3" time="2024-11-05T18:16:02-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice" time="2024-11-05T18:16:02-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice" time="2024-11-05T18:16:02-05:00" level=debug msg="/usr/bin/conmon messages will be logged 
to syslog" time="2024-11-05T18:16:02-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 -u 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08/userdata -p /run/containers/storage/overlay-containers/7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08/userdata/pidfile -n ebe2b5a6eec8-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08]" time="2024-11-05T18:16:02-05:00" level=info msg="Running conmon under slice machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice and unitName libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope" time="2024-11-05T18:16:03-05:00" level=debug msg="Received: 28105" time="2024-11-05T18:16:03-05:00" level=info msg="Got Conmon PID as 28096" time="2024-11-05T18:16:03-05:00" level=debug msg="Created container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 in OCI runtime" time="2024-11-05T18:16:03-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2024-11-05T18:16:03-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2024-11-05T18:16:03-05:00" level=debug msg="Starting container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08 with command [/catatonit -P]" time="2024-11-05T18:16:03-05:00" level=debug msg="Started container 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08" time="2024-11-05T18:16:03-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/754KVSN4QN5IJVIU5FJINW76VX,upperdir=/var/lib/containers/storage/overlay/dabe2d8c970a825acca6312f078f615fb3deefe70bc19205004d25381824b0c2/diff,workdir=/var/lib/containers/storage/overlay/dabe2d8c970a825acca6312f078f615fb3deefe70bc19205004d25381824b0c2/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c460,c537\"" 
time="2024-11-05T18:16:03-05:00" level=debug msg="Mounted container \"1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd\" at \"/var/lib/containers/storage/overlay/dabe2d8c970a825acca6312f078f615fb3deefe70bc19205004d25381824b0c2/merged\"" time="2024-11-05T18:16:03-05:00" level=debug msg="Created root filesystem for container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd at /var/lib/containers/storage/overlay/dabe2d8c970a825acca6312f078f615fb3deefe70bc19205004d25381824b0c2/merged" time="2024-11-05T18:16:03-05:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2024-11-05T18:16:03-05:00" level=debug msg="Setting Cgroups for container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd to machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice:libpod:1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd" time="2024-11-05T18:16:03-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2024-11-05T18:16:03-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2024-11-05T18:16:03-05:00" level=debug msg="Created OCI spec for container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd at /var/lib/containers/storage/overlay-containers/1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd/userdata/config.json" time="2024-11-05T18:16:03-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice for parent machine.slice and name libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3" time="2024-11-05T18:16:03-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice" time="2024-11-05T18:16:03-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice" time="2024-11-05T18:16:03-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2024-11-05T18:16:03-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd -u 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd/userdata -p /run/containers/storage/overlay-containers/1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg 
--volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd]" time="2024-11-05T18:16:03-05:00" level=info msg="Running conmon under slice machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice and unitName libpod-conmon-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope" time="2024-11-05T18:16:03-05:00" level=debug msg="Received: 28127" time="2024-11-05T18:16:03-05:00" level=info msg="Got Conmon PID as 28117" time="2024-11-05T18:16:03-05:00" level=debug msg="Created container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd in OCI runtime" time="2024-11-05T18:16:03-05:00" level=debug msg="Starting container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd with command [/bin/busybox-extras httpd -f -p 80]" time="2024-11-05T18:16:03-05:00" level=debug msg="Started container 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd" time="2024-11-05T18:16:03-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2024-11-05T18:16:03-05:00" level=debug msg="Shutting down engines" Nov 05 18:16:03 managed-node2 platform-python[27920]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Nov 05 18:16:03 managed-node2 platform-python[28258]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Nov 05 18:16:03 managed-node2 systemd[1]: Reloading. Nov 05 18:16:04 managed-node2 dnsmasq[28094]: listening on cni-podman1(#3): fe80::dcf7:58ff:fe79:c61e%cni-podman1 Nov 05 18:16:04 managed-node2 platform-python[28419]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Nov 05 18:16:04 managed-node2 systemd[1]: Reloading. Nov 05 18:16:05 managed-node2 platform-python[28582]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Nov 05 18:16:05 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice. -- Subject: Unit system-podman\x2dkube.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit system-podman\x2dkube.slice has finished starting up. -- -- The start-up result is done. Nov 05 18:16:05 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun starting up. 
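Stripped of debug noise, what the role just did for root reduces to four commands: deploy the kube YAML (containers.podman.podman_play shells out to podman play kube), reload systemd, then enable and start the escaped template unit. A condensed sketch using only the commands and unit name logged above:

    podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
    systemctl daemon-reload
    systemctl enable 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    systemctl start 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'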
Nov 05 18:16:05 managed-node2 conmon[28096]: conmon 7d9f02efd188f8ae313d : container 28105 exited with status 137
Nov 05 18:16:05 managed-node2 systemd[1]: libpod-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 systemd[1]: libpod-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope: Consumed 34ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope completed and consumed the indicated resources.
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08)"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Setting custom database backend: \"sqlite\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=info msg="Using sqlite as database backend"
Nov 05 18:16:05 managed-node2 conmon[28117]: conmon 1032dc69fd1779256abc : container 28127 exited with status 137
Nov 05 18:16:05 managed-node2 systemd[1]: libpod-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 systemd[1]: libpod-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope: Consumed 35ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope completed and consumed the indicated resources.
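Exit status 137 here is not an application failure: it is 128 + 9, meaning the processes were SIGKILLed, most likely because the template unit's replay is tearing down the pod that the ad-hoc podman play kube run created two seconds earlier (the same stop/remove cycle seen for the rootless user above). Bash can decode such statuses directly:

    kill -l 137    # prints KILL: status 137 = 128 + SIGKILL(9)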
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd)"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Setting custom database backend: \"sqlite\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=info msg="Using sqlite as database backend"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using graph driver overlay"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using graph root /var/lib/containers/storage"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using run root /run/containers/storage"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using tmp dir /run/libpod"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using transient store: false"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that overlay is supported"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that overlay is supported"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that metacopy is being used"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that native-diff is not being used"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Initializing event backend file"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=info msg="Setting parallel job count to 7"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using graph driver overlay"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using graph root /var/lib/containers/storage"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using run root /run/containers/storage"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using tmp dir /run/libpod"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using transient store: false"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that overlay is supported"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that overlay is supported"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that metacopy is being used"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Cached value indicated that native-diff is not being used"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Initializing event backend file"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=info msg="Setting parallel job count to 7"
Nov 05 18:16:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay-dabe2d8c970a825acca6312f078f615fb3deefe70bc19205004d25381824b0c2-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-dabe2d8c970a825acca6312f078f615fb3deefe70bc19205004d25381824b0c2-merged.mount has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd)"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28616]: time="2024-11-05T18:16:05-05:00" level=debug msg="Shutting down engines"
Nov 05 18:16:05 managed-node2 systemd[1]: libpod-conmon-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-1032dc69fd1779256abcb429822706cefee5d77c6dab7eb46c996e7c9e589bbd.scope has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 kernel: cni-podman1: port 1(vethcc1f0013) entered disabled state
Nov 05 18:16:05 managed-node2 kernel: device vethcc1f0013 left promiscuous mode
Nov 05 18:16:05 managed-node2 kernel: cni-podman1: port 1(vethcc1f0013) entered disabled state
Nov 05 18:16:05 managed-node2 systemd[1]: run-netns-netns\x2daec62810\x2dfb9d\x2db083\x2db280\x2dc948d45ba6ec.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2daec62810\x2dfb9d\x2db083\x2db280\x2dc948d45ba6ec.mount has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08-userdata-shm.mount has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6220e31cf6858fe37ac0f4a1c1641478d98df30ac2bffc92b38afd71425f14a9-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-6220e31cf6858fe37ac0f4a1c1641478d98df30ac2bffc92b38afd71425f14a9-merged.mount has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08)"
Nov 05 18:16:05 managed-node2 /usr/bin/podman[28604]: time="2024-11-05T18:16:05-05:00" level=debug msg="Shutting down engines"
Nov 05 18:16:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 systemd[1]: Stopping libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope.
-- Subject: Unit libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has begun shutting down.
Nov 05 18:16:05 managed-node2 systemd[1]: libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has successfully entered the 'dead' state.
Nov 05 18:16:05 managed-node2 systemd[1]: Stopped libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope.
-- Subject: Unit libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-7d9f02efd188f8ae313dde69d0a4512f38f345d590213f1d809a11bb9cf7dc08.scope has finished shutting down.
Nov 05 18:16:05 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice.
-- Subject: Unit machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice has finished shutting down.
Nov 05 18:16:05 managed-node2 systemd[1]: machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice: Consumed 210ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3.slice completed and consumed the indicated resources.
Nov 05 18:16:05 managed-node2 podman[28589]: Pods stopped:
Nov 05 18:16:05 managed-node2 podman[28589]: ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3
Nov 05 18:16:05 managed-node2 podman[28589]: Pods removed:
Nov 05 18:16:05 managed-node2 podman[28589]: ebe2b5a6eec80b89fff88ea654df8969642f7c5e96c95f16cbe7ce9f39251fa3
Nov 05 18:16:05 managed-node2 podman[28589]: Secrets removed:
Nov 05 18:16:05 managed-node2 podman[28589]: Volumes removed:
Nov 05 18:16:05 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977.slice.
-- Subject: Unit machine-libpod_pod_7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977.slice has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:05 managed-node2 systemd[1]: Started libcontainer container 7d44b1ef188ef6fcb030bb221d63d85bbd5ee869b1097d28a39734197d7b4c1e.
-- Subject: Unit libpod-7d44b1ef188ef6fcb030bb221d63d85bbd5ee869b1097d28a39734197d7b4c1e.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-7d44b1ef188ef6fcb030bb221d63d85bbd5ee869b1097d28a39734197d7b4c1e.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:05 managed-node2 NetworkManager[673]: [1730848565.9698] manager: (veth3c36bf2f): new Veth device (/org/freedesktop/NetworkManager/Devices/5)
Nov 05 18:16:05 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth3c36bf2f: link is not ready
Nov 05 18:16:05 managed-node2 systemd-udevd[28747]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 05 18:16:05 managed-node2 systemd-udevd[28747]: Could not generate persistent MAC address for veth3c36bf2f: No such file or directory
Nov 05 18:16:05 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered blocking state
Nov 05 18:16:05 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered disabled state
Nov 05 18:16:05 managed-node2 kernel: device veth3c36bf2f entered promiscuous mode
Nov 05 18:16:05 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered blocking state
Nov 05 18:16:05 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered forwarding state
Nov 05 18:16:05 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered disabled state
Nov 05 18:16:06 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth3c36bf2f: link becomes ready
Nov 05 18:16:06 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered blocking state
Nov 05 18:16:06 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered forwarding state
Nov 05 18:16:06 managed-node2 NetworkManager[673]: [1730848566.0059] device (veth3c36bf2f): carrier: link connected
Nov 05 18:16:06 managed-node2 NetworkManager[673]: [1730848566.0062] device (cni-podman1): carrier: link connected
Nov 05 18:16:06 managed-node2 dnsmasq[28818]: listening on cni-podman1(#3): 10.89.0.1
Nov 05 18:16:06 managed-node2 dnsmasq[28818]: listening on cni-podman1(#3): fe80::dcf7:58ff:fe79:c61e%cni-podman1
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: started, version 2.79 cachesize 150
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: using local addresses only for domain dns.podman
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: reading /etc/resolv.conf
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: using local addresses only for domain dns.podman
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: using nameserver 10.29.169.13#53
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: using nameserver 10.29.170.12#53
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: using nameserver 10.2.32.1#53
Nov 05 18:16:06 managed-node2 dnsmasq[28822]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Nov 05 18:16:06 managed-node2 systemd[1]: Started libcontainer container 2b355e25503c1f43cf72117f8264ceaed8f96792ddc3febaada89afbc737fad0.
-- Subject: Unit libpod-2b355e25503c1f43cf72117f8264ceaed8f96792ddc3febaada89afbc737fad0.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-2b355e25503c1f43cf72117f8264ceaed8f96792ddc3febaada89afbc737fad0.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:06 managed-node2 systemd[1]: Started libcontainer container 145c36fbe919c3680de88cc74a0fc154cda899b4c8f6e42fa0f8b22d09986469.
-- Subject: Unit libpod-145c36fbe919c3680de88cc74a0fc154cda899b4c8f6e42fa0f8b22d09986469.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-145c36fbe919c3680de88cc74a0fc154cda899b4c8f6e42fa0f8b22d09986469.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:06 managed-node2 podman[28589]: Pod:
Nov 05 18:16:06 managed-node2 podman[28589]: 7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977
Nov 05 18:16:06 managed-node2 podman[28589]: Container:
Nov 05 18:16:06 managed-node2 podman[28589]: 145c36fbe919c3680de88cc74a0fc154cda899b4c8f6e42fa0f8b22d09986469
Nov 05 18:16:06 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:07 managed-node2 platform-python[28988]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:16:09 managed-node2 platform-python[29121]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:10 managed-node2 platform-python[29245]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:11 managed-node2 platform-python[29368]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:12 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Nov 05 18:16:12 managed-node2 platform-python[29656]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:16:13 managed-node2 platform-python[29780]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:14 managed-node2 platform-python[29903]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Nov 05 18:16:14 managed-node2 platform-python[30002]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1730848573.8542528-17739-44104012617338/source _original_basename=tmpgm8r5kbf follow=False checksum=619a7387b950bb62a2c8508508f113e540f9e71d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:15 managed-node2 platform-python[30127]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:16:15 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice.
-- Subject: Unit machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:15 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethae304473: link is not ready
Nov 05 18:16:15 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered blocking state
Nov 05 18:16:15 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered disabled state
Nov 05 18:16:15 managed-node2 kernel: device vethae304473 entered promiscuous mode
Nov 05 18:16:15 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered blocking state
Nov 05 18:16:15 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered forwarding state
Nov 05 18:16:15 managed-node2 NetworkManager[673]: [1730848575.2537] manager: (vethae304473): new Veth device (/org/freedesktop/NetworkManager/Devices/6)
Nov 05 18:16:15 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered disabled state
Nov 05 18:16:15 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Nov 05 18:16:15 managed-node2 systemd-udevd[30181]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 05 18:16:15 managed-node2 systemd-udevd[30181]: Could not generate persistent MAC address for vethae304473: No such file or directory
Nov 05 18:16:15 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Nov 05 18:16:15 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethae304473: link becomes ready
Nov 05 18:16:15 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered blocking state
Nov 05 18:16:15 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered forwarding state
Nov 05 18:16:15 managed-node2 NetworkManager[673]: [1730848575.2708] device (vethae304473): carrier: link connected
Nov 05 18:16:15 managed-node2 dnsmasq[28822]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses
Nov 05 18:16:15 managed-node2 systemd[1]: Started libpod-conmon-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope.
-- Subject: Unit libpod-conmon-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:15 managed-node2 systemd[1]: Started libcontainer container cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.
-- Subject: Unit libpod-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:15 managed-node2 systemd[1]: Started libpod-conmon-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope.
-- Subject: Unit libpod-conmon-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:15 managed-node2 systemd[1]: Started libcontainer container 7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.
-- Subject: Unit libpod-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:16 managed-node2 platform-python[30405]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Nov 05 18:16:16 managed-node2 systemd[1]: Reloading.
Nov 05 18:16:17 managed-node2 platform-python[30566]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None
Nov 05 18:16:17 managed-node2 systemd[1]: Reloading.
Nov 05 18:16:17 managed-node2 platform-python[30721]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Nov 05 18:16:17 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun starting up.
Nov 05 18:16:17 managed-node2 systemd[1]: libpod-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope has successfully entered the 'dead' state.
Nov 05 18:16:17 managed-node2 systemd[1]: libpod-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope completed and consumed the indicated resources.
Nov 05 18:16:18 managed-node2 systemd[1]: libpod-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope has successfully entered the 'dead' state.
Nov 05 18:16:18 managed-node2 systemd[1]: libpod-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope: Consumed 33ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope completed and consumed the indicated resources.
Nov 05 18:16:18 managed-node2 dnsmasq[28822]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Nov 05 18:16:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-12bfe9996b08d4f54f1c4e067fc5facf72a1215494ac25cbbb228b66de17aa39-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-12bfe9996b08d4f54f1c4e067fc5facf72a1215494ac25cbbb228b66de17aa39-merged.mount has successfully entered the 'dead' state.
Nov 05 18:16:18 managed-node2 systemd[1]: libpod-conmon-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-7f6da02151297d67d45b5d661a7cef7cae830f113dba1f911760cf13c148dbfc.scope has successfully entered the 'dead' state.
Nov 05 18:16:18 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered disabled state
Nov 05 18:16:18 managed-node2 kernel: device vethae304473 left promiscuous mode
Nov 05 18:16:18 managed-node2 kernel: cni-podman1: port 2(vethae304473) entered disabled state
Nov 05 18:16:18 managed-node2 systemd[1]: run-netns-netns\x2d19c71ca0\x2d221a\x2df59f\x2d5553\x2d5b7ece5df0d8.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d19c71ca0\x2d221a\x2df59f\x2d5553\x2d5b7ece5df0d8.mount has successfully entered the 'dead' state.
Nov 05 18:16:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56-userdata-shm.mount has successfully entered the 'dead' state.
Nov 05 18:16:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-cc294a0de5fc081ca0f24ae113983ef01bed923c4fca4fa0d14c4cac56610bfb-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-cc294a0de5fc081ca0f24ae113983ef01bed923c4fca4fa0d14c4cac56610bfb-merged.mount has successfully entered the 'dead' state.
Nov 05 18:16:18 managed-node2 systemd[1]: libpod-conmon-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-cd7aec51e912fade4ee1047a6951bef76c462dbcfd1e51cf59d2068d30130d56.scope has successfully entered the 'dead' state.
Nov 05 18:16:18 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice.
-- Subject: Unit machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice has finished shutting down.
Nov 05 18:16:18 managed-node2 systemd[1]: machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice: Consumed 227ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e.slice completed and consumed the indicated resources.
Nov 05 18:16:18 managed-node2 podman[30728]: Pods stopped:
Nov 05 18:16:18 managed-node2 podman[30728]: 1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e
Nov 05 18:16:18 managed-node2 podman[30728]: Pods removed:
Nov 05 18:16:18 managed-node2 podman[30728]: 1312a5b56f110f53699d8399b798e117efd33c46cf621c7c919c042a9e84078e
Nov 05 18:16:18 managed-node2 podman[30728]: Secrets removed:
Nov 05 18:16:18 managed-node2 podman[30728]: Volumes removed:
Nov 05 18:16:18 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c.slice.
-- Subject: Unit machine-libpod_pod_12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c.slice has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:18 managed-node2 systemd[1]: Started libcontainer container 2425fa9943693439077016bf27e4f1a3e49f74001aca0311ea36c109a2428748.
-- Subject: Unit libpod-2425fa9943693439077016bf27e4f1a3e49f74001aca0311ea36c109a2428748.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-2425fa9943693439077016bf27e4f1a3e49f74001aca0311ea36c109a2428748.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:18 managed-node2 NetworkManager[673]: [1730848578.5344] manager: (veth4ee6f57d): new Veth device (/org/freedesktop/NetworkManager/Devices/7)
Nov 05 18:16:18 managed-node2 systemd-udevd[30894]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 05 18:16:18 managed-node2 systemd-udevd[30894]: Could not generate persistent MAC address for veth4ee6f57d: No such file or directory
Nov 05 18:16:18 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth4ee6f57d: link is not ready
Nov 05 18:16:18 managed-node2 kernel: cni-podman1: port 2(veth4ee6f57d) entered blocking state
Nov 05 18:16:18 managed-node2 kernel: cni-podman1: port 2(veth4ee6f57d) entered disabled state
Nov 05 18:16:18 managed-node2 kernel: device veth4ee6f57d entered promiscuous mode
Nov 05 18:16:18 managed-node2 kernel: cni-podman1: port 2(veth4ee6f57d) entered blocking state
Nov 05 18:16:18 managed-node2 kernel: cni-podman1: port 2(veth4ee6f57d) entered forwarding state
Nov 05 18:16:18 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth4ee6f57d: link becomes ready
Nov 05 18:16:18 managed-node2 NetworkManager[673]: [1730848578.5785] device (veth4ee6f57d): carrier: link connected
Nov 05 18:16:18 managed-node2 dnsmasq[28822]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses
Nov 05 18:16:18 managed-node2 systemd[1]: Started libcontainer container bfed9ed9c65682fca4ff70aa71fda46f39c0d492d6af6f3356e4840de4d5b19e.
-- Subject: Unit libpod-bfed9ed9c65682fca4ff70aa71fda46f39c0d492d6af6f3356e4840de4d5b19e.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-bfed9ed9c65682fca4ff70aa71fda46f39c0d492d6af6f3356e4840de4d5b19e.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:18 managed-node2 systemd[1]: Started libcontainer container dc8f1be5137e8b16649630cb105a5b8adfcb2100d6d2f3f5adb7ef37776a80f0.
-- Subject: Unit libpod-dc8f1be5137e8b16649630cb105a5b8adfcb2100d6d2f3f5adb7ef37776a80f0.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-dc8f1be5137e8b16649630cb105a5b8adfcb2100d6d2f3f5adb7ef37776a80f0.scope has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:18 managed-node2 podman[30728]: Pod:
Nov 05 18:16:18 managed-node2 podman[30728]: 12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c
Nov 05 18:16:18 managed-node2 podman[30728]: Container:
Nov 05 18:16:18 managed-node2 podman[30728]: dc8f1be5137e8b16649630cb105a5b8adfcb2100d6d2f3f5adb7ef37776a80f0
Nov 05 18:16:18 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:19 managed-node2 sudo[31128]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-enjsswdtvrlhiqjnscyvrevijgdofdye ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848579.4032192-17973-4376377021637/AnsiballZ_command.py'
Nov 05 18:16:19 managed-node2 sudo[31128]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:16:19 managed-node2 platform-python[31131]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:19 managed-node2 systemd[24551]: Started podman-31140.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:16:19 managed-node2 sudo[31128]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:16:20 managed-node2 platform-python[31278]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:20 managed-node2 platform-python[31409]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:20 managed-node2 sudo[31540]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mzwwepsmardidgppexspnlieonphwpbt ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848580.7474627-18022-38014383997679/AnsiballZ_command.py'
Nov 05 18:16:20 managed-node2 sudo[31540]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:16:21 managed-node2 platform-python[31543]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:21 managed-node2 sudo[31540]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:16:21 managed-node2 platform-python[31669]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:21 managed-node2 platform-python[31795]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:22 managed-node2 platform-python[31921]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:22 managed-node2 platform-python[32047]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:16:22 managed-node2 rsyslogd[1031]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]
Nov 05 18:16:23 managed-node2 platform-python[32174]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_g8amvhe2_podman/httpd1-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:23 managed-node2 platform-python[32298]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_g8amvhe2_podman/httpd2-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:24 managed-node2 platform-python[32422]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_g8amvhe2_podman/httpd3-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:27 managed-node2 platform-python[32671]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:29 managed-node2 platform-python[32800]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:16:32 managed-node2 platform-python[32925]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Nov 05 18:16:36 managed-node2 platform-python[33049]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Nov 05 18:16:37 managed-node2 platform-python[33176]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Nov 05 18:16:38 managed-node2 platform-python[33303]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Nov 05 18:16:40 managed-node2 platform-python[33426]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Nov 05 18:16:43 managed-node2 platform-python[33550]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Nov 05 18:16:47 managed-node2 platform-python[33674]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Nov 05 18:16:49 managed-node2 platform-python[33839]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Nov 05 18:16:49 managed-node2 platform-python[33962]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Nov 05 18:16:54 managed-node2 platform-python[34085]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Nov 05 18:16:55 managed-node2 platform-python[34209]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:16:56 managed-node2 platform-python[34334]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:56 managed-node2 platform-python[34458]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:58 managed-node2 platform-python[34582]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:16:59 managed-node2 platform-python[34706]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None
Nov 05 18:16:59 managed-node2 platform-python[34829]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:00 managed-node2 platform-python[34952]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:00 managed-node2 sudo[35075]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tgbxasemujtznovcnbwwhmnrcxqznswf ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848620.5469327-19633-5232598344550/AnsiballZ_podman_image.py'
Nov 05 18:17:00 managed-node2 sudo[35075]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:17:00 managed-node2 systemd[24551]: Started podman-35080.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:17:00 managed-node2 systemd[24551]: Started podman-35088.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:17:00 managed-node2 systemd[24551]: Started podman-35096.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:17:01 managed-node2 systemd[24551]: Started podman-35104.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:17:01 managed-node2 systemd[24551]: Started podman-35112.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:17:04 managed-node2 systemd[24551]: Started podman-35121.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:17:04 managed-node2 sudo[35075]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:17:05 managed-node2 platform-python[35250]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:05 managed-node2 platform-python[35375]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:06 managed-node2 platform-python[35498]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Nov 05 18:17:06 managed-node2 platform-python[35562]: ansible-ansible.legacy.file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=tmp81sec4dr recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:06 managed-node2 sudo[35685]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xealynjnafwbykdsislocdkylpglvhhs ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848626.7399783-19934-87063009286569/AnsiballZ_podman_play.py'
Nov 05 18:17:06 managed-node2 sudo[35685]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:17:07 managed-node2 platform-python[35688]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:17:07 managed-node2 systemd[24551]: Started podman-35696.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Nov 05 18:17:07 managed-node2 platform-python[35688]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Nov 05 18:17:07 managed-node2 platform-python[35688]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
Nov 05 18:17:07 managed-node2 platform-python[35688]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
    time="2024-11-05T18:17:07-05:00" level=info msg="/bin/podman filtering at log level debug"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
    time="2024-11-05T18:17:07-05:00" level=info msg="Using sqlite as database backend"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using graph driver overlay"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using run root /run/user/3001/containers"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using transient store: false"
    time="2024-11-05T18:17:07-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
    time="2024-11-05T18:17:07-05:00" level=debug msg="Cached value indicated that overlay is supported"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Cached value indicated that overlay is supported"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Cached value indicated that metacopy is not being used"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Cached value indicated that native-diff is usable"
    time="2024-11-05T18:17:07-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Initializing event backend file"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
    time="2024-11-05T18:17:07-05:00" level=info msg="Setting parallel job count to 7"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2024-11-05 18:15:49.094227475 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Successfully loaded 2 networks"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
    time="2024-11-05T18:17:07-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\""
    time="2024-11-05T18:17:07-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a)"
    time="2024-11-05T18:17:07-05:00" level=debug msg="exporting opaque data as blob \"sha256:a7735e196abd39e2ecb2b8ad6a9318872e5702b043695e8e2b718776213ebb6a\""
    time="2024-11-05T18:17:07-05:00" level=debug msg="Pod using bridge network mode"
    time="2024-11-05T18:17:07-05:00" level=debug msg="Got pod cgroup as /libpod_parent/cba6eeb131f0797fc1d4dc25cec53a86e727f9a3260136a62e1084e7b08938c7"
    Error: adding pod to state: name "httpd1" is in use: pod already exists
    time="2024-11-05T18:17:07-05:00" level=debug msg="Shutting down engines"
Nov 05 18:17:07 managed-node2 platform-python[35688]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125
Nov 05 18:17:07 managed-node2 sudo[35685]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:17:08 managed-node2 platform-python[35850]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Nov 05 18:17:09 managed-node2 platform-python[35974]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:10 managed-node2 platform-python[36099]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:12 managed-node2 platform-python[36223]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:12 managed-node2 platform-python[36346]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:15 managed-node2 platform-python[36636]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:15 managed-node2 platform-python[36761]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:16 managed-node2 platform-python[36884]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Nov 05 18:17:16 managed-node2 platform-python[36948]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=tmpco465h54 recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:17 managed-node2 platform-python[37071]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:17:17 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_198d82a76893ed652889aaaefde79e4352603fc4a49c0e0f3f58138e5773fe08.slice.
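Note: as the PODMAN-PLAY-KUBE lines record, containers.podman.podman_play with state=started reduces to a single CLI call against the deployed YAML; the rootless run uses the user's storage under ~/.local/share/containers, the root run /var/lib/containers. The equivalent manual invocation for the root deployment, with the path taken from this log:

    /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml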
Nov 05 18:17:17 managed-node2 platform-python[37071]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
Nov 05 18:17:17 managed-node2 platform-python[37071]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
Nov 05 18:17:17 managed-node2 platform-python[37071]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
    time="2024-11-05T18:17:17-05:00" level=info msg="/usr/bin/podman filtering at log level debug"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
    time="2024-11-05T18:17:17-05:00" level=info msg="Using sqlite as database backend"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using graph driver overlay"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using graph root /var/lib/containers/storage"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using run root /run/containers/storage"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using tmp dir /run/libpod"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using transient store: false"
    time="2024-11-05T18:17:17-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
    time="2024-11-05T18:17:17-05:00" level=debug msg="Cached value indicated that overlay is supported"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Cached value indicated that overlay is supported"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Cached value indicated that metacopy is being used"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Cached value indicated that native-diff is not being used"
    time="2024-11-05T18:17:17-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
    time="2024-11-05T18:17:17-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Initializing event backend file"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
    time="2024-11-05T18:17:17-05:00" level=info msg="Setting parallel job count to 7"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2024-11-05 18:13:16.4951986 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Successfully loaded 2 networks"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
    time="2024-11-05T18:17:17-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\""
    time="2024-11-05T18:17:17-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065)"
    time="2024-11-05T18:17:17-05:00" level=debug msg="exporting opaque data as blob \"sha256:44161fa65517dab7d303ed2725d6cfe848f8226e68bcf1d49928f862eeb36065\""
    time="2024-11-05T18:17:17-05:00" level=debug msg="Pod using bridge network mode"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_198d82a76893ed652889aaaefde79e4352603fc4a49c0e0f3f58138e5773fe08.slice for parent machine.slice and name libpod_pod_198d82a76893ed652889aaaefde79e4352603fc4a49c0e0f3f58138e5773fe08"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_198d82a76893ed652889aaaefde79e4352603fc4a49c0e0f3f58138e5773fe08.slice"
    time="2024-11-05T18:17:17-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_198d82a76893ed652889aaaefde79e4352603fc4a49c0e0f3f58138e5773fe08.slice"
    Error: adding pod to state: name "httpd2" is in use: pod already exists
    time="2024-11-05T18:17:17-05:00" level=debug msg="Shutting down engines"
Nov 05 18:17:17 managed-node2 platform-python[37071]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125
Nov 05 18:17:18 managed-node2 platform-python[37232]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:19 managed-node2 platform-python[37357]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:21 managed-node2 platform-python[37481]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:21 managed-node2 platform-python[37604]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:24 managed-node2 platform-python[37894]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:24 managed-node2 platform-python[38019]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:25 managed-node2 platform-python[38142]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Nov 05 18:17:25 managed-node2 platform-python[38206]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=tmpqu00c52s recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:25 managed-node2 platform-python[38329]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:17:25 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_24c38b006a32e92a0dd6d51d40d4ee75fd1b9290bcc349bc83bea954df709f45.slice.
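Note: the two rc 125 failures above (18:17:07 for httpd1, 18:17:17 for httpd2) are podman play kube refusing to create a pod whose name is already in state from an earlier run; by default it does not replace an existing pod. Where a re-deploy is intended, newer Podman releases accept a replace option (a hedged sketch; verify the flag exists on your Podman version):

    podman play kube --replace --start=true /etc/containers/ansible-kubernetes.d/httpd2.yml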
Nov 05 18:17:27 managed-node2 sudo[38490]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pawzfpreotwmselabcaprqnzbhqpqdfl ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848646.9100769-20896-93907572224209/AnsiballZ_command.py'
Nov 05 18:17:27 managed-node2 sudo[38490]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:17:27 managed-node2 platform-python[38493]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:27 managed-node2 systemd[24551]: Started podman-38502.scope.
Nov 05 18:17:27 managed-node2 sudo[38490]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:17:27 managed-node2 platform-python[38633]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:28 managed-node2 platform-python[38764]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:28 managed-node2 sudo[38895]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-loifrjnlylqhnxbypeiosvbyabqbqjao ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848648.3523703-20967-242165174976592/AnsiballZ_command.py'
Nov 05 18:17:28 managed-node2 sudo[38895]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:17:28 managed-node2 platform-python[38898]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:28 managed-node2 sudo[38895]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:17:28 managed-node2 platform-python[39024]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:29 managed-node2 platform-python[39150]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:30 managed-node2 platform-python[39276]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:30 managed-node2 platform-python[39403]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:31 managed-node2 platform-python[39529]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:34 managed-node2 platform-python[39780]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:35 managed-node2 platform-python[39909]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:39 managed-node2 platform-python[40034]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Nov 05 18:17:40 managed-node2 platform-python[40158]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:41 managed-node2 platform-python[40283]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:41 managed-node2 platform-python[40407]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:43 managed-node2 platform-python[40531]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:44 managed-node2 platform-python[40655]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:44 managed-node2 sudo[40780]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gokkyqhqyjseekvmnpkfhkwkkgerobem ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848664.2191374-21619-252013641658331/AnsiballZ_systemd.py'
Nov 05 18:17:44 managed-node2 sudo[40780]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:17:44 managed-node2 platform-python[40783]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Nov 05 18:17:44 managed-node2 systemd[24551]: Reloading.
Nov 05 18:17:44 managed-node2 systemd[24551]: Stopping A template for running K8s workloads via podman-kube-play...
Nov 05 18:17:44 managed-node2 kernel: cni-podman1: port 1(veth26c773e1) entered disabled state
Nov 05 18:17:44 managed-node2 kernel: device veth26c773e1 left promiscuous mode
Nov 05 18:17:44 managed-node2 kernel: cni-podman1: port 1(veth26c773e1) entered disabled state
Nov 05 18:17:45 managed-node2 podman[40799]: Pods stopped:
Nov 05 18:17:45 managed-node2 podman[40799]: 95db8d3a8cec46d84288f877334e5b6c99afafbed8df07230dc29b7473bd0f2b
Nov 05 18:17:45 managed-node2 podman[40799]: Pods removed:
Nov 05 18:17:45 managed-node2 podman[40799]: 95db8d3a8cec46d84288f877334e5b6c99afafbed8df07230dc29b7473bd0f2b
Nov 05 18:17:45 managed-node2 podman[40799]: Secrets removed:
Nov 05 18:17:45 managed-node2 podman[40799]: Volumes removed:
Nov 05 18:17:45 managed-node2 systemd[24551]: Stopped A template for running K8s workloads via podman-kube-play.
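Note: each kube YAML maps to one instance of the podman-kube@.service template, with the file path escaped into the instance name; the systemd-escape calls above compute exactly that name. For the rootless file, with the path and the resulting unit name both taken from this log:

    systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    # -> podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service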
Nov 05 18:17:45 managed-node2 sudo[40780]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:17:45 managed-node2 platform-python[41074]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:46 managed-node2 sudo[41199]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cyrtkplhxmemrylzpzdmtdafrvvmmgvd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848665.922027-21704-241929900069808/AnsiballZ_podman_play.py'
Nov 05 18:17:46 managed-node2 sudo[41199]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:17:46 managed-node2 platform-python[41202]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:17:46 managed-node2 platform-python[41202]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Nov 05 18:17:46 managed-node2 systemd[24551]: Started podman-41210.scope.
Nov 05 18:17:46 managed-node2 platform-python[41202]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
Nov 05 18:17:46 managed-node2 platform-python[41202]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
    Pods stopped:
    Pods removed:
    Secrets removed:
    Volumes removed:
Nov 05 18:17:46 managed-node2 platform-python[41202]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
Nov 05 18:17:46 managed-node2 platform-python[41202]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0
Nov 05 18:17:46 managed-node2 sudo[41199]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:17:46 managed-node2 platform-python[41339]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:48 managed-node2 platform-python[41462]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Nov 05 18:17:48 managed-node2 platform-python[41586]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:50 managed-node2 platform-python[41711]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:51 managed-node2 platform-python[41835]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Nov 05 18:17:51 managed-node2 systemd[1]: Reloading.
Nov 05 18:17:51 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun shutting down.
Nov 05 18:17:51 managed-node2 systemd[1]: libpod-2b355e25503c1f43cf72117f8264ceaed8f96792ddc3febaada89afbc737fad0.scope: Succeeded.
Nov 05 18:17:51 managed-node2 systemd[1]: libpod-2b355e25503c1f43cf72117f8264ceaed8f96792ddc3febaada89afbc737fad0.scope: Consumed 33ms CPU time
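Note: teardown mirrors deployment: the template unit is stopped first, then podman_play with state=absent runs podman kube play --down against the same YAML, and finally the file itself is removed. A manual sketch for the rootless pod, using the unit name and paths recorded above:

    systemctl --user stop 'podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service'
    podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    rm /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml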
Nov 05 18:17:51 managed-node2 systemd[1]: libpod-145c36fbe919c3680de88cc74a0fc154cda899b4c8f6e42fa0f8b22d09986469.scope: Succeeded.
Nov 05 18:17:51 managed-node2 systemd[1]: libpod-145c36fbe919c3680de88cc74a0fc154cda899b4c8f6e42fa0f8b22d09986469.scope: Consumed 35ms CPU time
Nov 05 18:17:51 managed-node2 dnsmasq[28822]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Nov 05 18:17:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay-bba0c76b586af59bea66023b9fb39db7dc01b6763f492fdb3b4c4de5bd9cbffe-merged.mount: Succeeded.
Nov 05 18:17:51 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered disabled state
Nov 05 18:17:51 managed-node2 kernel: device veth3c36bf2f left promiscuous mode
Nov 05 18:17:51 managed-node2 kernel: cni-podman1: port 1(veth3c36bf2f) entered disabled state
Nov 05 18:17:51 managed-node2 systemd[1]: run-netns-netns\x2de9d9afd0\x2d39f3\x2d1f43\x2d643c\x2d56321e961aab.mount: Succeeded.
Nov 05 18:17:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2b355e25503c1f43cf72117f8264ceaed8f96792ddc3febaada89afbc737fad0-userdata-shm.mount: Succeeded.
Nov 05 18:17:51 managed-node2 systemd[1]: var-lib-containers-storage-overlay-12f0d00f143f96c8df1402ef9bfca689ecb3c6d64f406fed32051dff2daba52c-merged.mount: Succeeded.
Nov 05 18:17:51 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977.slice.
Nov 05 18:17:51 managed-node2 systemd[1]: machine-libpod_pod_7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977.slice: Consumed 69ms CPU time
Nov 05 18:17:51 managed-node2 systemd[1]: libpod-7d44b1ef188ef6fcb030bb221d63d85bbd5ee869b1097d28a39734197d7b4c1e.scope: Succeeded.
Nov 05 18:17:51 managed-node2 systemd[1]: libpod-7d44b1ef188ef6fcb030bb221d63d85bbd5ee869b1097d28a39734197d7b4c1e.scope: Consumed 37ms CPU time
Nov 05 18:17:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-7d44b1ef188ef6fcb030bb221d63d85bbd5ee869b1097d28a39734197d7b4c1e-userdata-shm.mount: Succeeded.
Nov 05 18:17:52 managed-node2 podman[41871]: Pods stopped:
Nov 05 18:17:52 managed-node2 podman[41871]: 7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977
Nov 05 18:17:52 managed-node2 podman[41871]: Pods removed:
Nov 05 18:17:52 managed-node2 podman[41871]: 7e96fbc2efbc3c33907e0f6c1ce9981015ab9254adc7098d4191d788d6d21977
Nov 05 18:17:52 managed-node2 podman[41871]: Secrets removed:
Nov 05 18:17:52 managed-node2 podman[41871]: Volumes removed:
Nov 05 18:17:52 managed-node2 dnsmasq[28822]: exiting on receipt of SIGTERM
Nov 05 18:17:52 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Succeeded.
Nov 05 18:17:52 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished shutting down.
Nov 05 18:17:52 managed-node2 platform-python[42148]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-3a330bff9d3def36cab90aaacc013038514cfebb282d7ae4cadb5f2aca67e1ed-merged.mount: Succeeded.
Nov 05 18:17:52 managed-node2 platform-python[42273]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Nov 05 18:17:52 managed-node2 platform-python[42273]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml
Nov 05 18:17:53 managed-node2 platform-python[42273]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml
Nov 05 18:17:53 managed-node2 platform-python[42273]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
    Pods stopped:
    Pods removed:
    Secrets removed:
    Volumes removed:
Nov 05 18:17:53 managed-node2 platform-python[42273]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
Nov 05 18:17:53 managed-node2 platform-python[42273]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0
Nov 05 18:17:53 managed-node2 platform-python[42410]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:17:55 managed-node2 platform-python[42533]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:17:57 managed-node2 platform-python[42658]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:17:58 managed-node2 platform-python[42782]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Nov 05 18:17:58 managed-node2 systemd[1]: Reloading.
Nov 05 18:17:58 managed-node2 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun shutting down.
Nov 05 18:17:58 managed-node2 systemd[1]: libpod-bfed9ed9c65682fca4ff70aa71fda46f39c0d492d6af6f3356e4840de4d5b19e.scope: Succeeded.
Nov 05 18:17:58 managed-node2 systemd[1]: libpod-bfed9ed9c65682fca4ff70aa71fda46f39c0d492d6af6f3356e4840de4d5b19e.scope: Consumed 32ms CPU time
Nov 05 18:17:58 managed-node2 systemd[1]: libpod-dc8f1be5137e8b16649630cb105a5b8adfcb2100d6d2f3f5adb7ef37776a80f0.scope: Succeeded.
Nov 05 18:17:58 managed-node2 systemd[1]: libpod-dc8f1be5137e8b16649630cb105a5b8adfcb2100d6d2f3f5adb7ef37776a80f0.scope: Consumed 33ms CPU time
Nov 05 18:17:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-5e4cc6d1925c7c9f06af9ab15a459719e9657b67bb0e0f5d40d9f2eb5c08aeaa-merged.mount: Succeeded.
Nov 05 18:17:58 managed-node2 kernel: cni-podman1: port 2(veth4ee6f57d) entered disabled state
Nov 05 18:17:58 managed-node2 kernel: device veth4ee6f57d left promiscuous mode
Nov 05 18:17:58 managed-node2 kernel: cni-podman1: port 2(veth4ee6f57d) entered disabled state
Nov 05 18:17:58 managed-node2 systemd[1]: run-netns-netns\x2df74797e3\x2db5d7\x2d5d84\x2df438\x2d26ad88751ae4.mount: Succeeded.
Nov 05 18:17:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-bfed9ed9c65682fca4ff70aa71fda46f39c0d492d6af6f3356e4840de4d5b19e-userdata-shm.mount: Succeeded.
Nov 05 18:17:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay-3c44e05b2d16ecccea7c57a0ea6648c0309a56a226f4980195567d849e35bc70-merged.mount: Succeeded.
Nov 05 18:17:58 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c.slice.
Nov 05 18:17:58 managed-node2 systemd[1]: machine-libpod_pod_12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c.slice: Consumed 66ms CPU time
Nov 05 18:17:58 managed-node2 systemd[1]: libpod-2425fa9943693439077016bf27e4f1a3e49f74001aca0311ea36c109a2428748.scope: Succeeded.
Nov 05 18:17:58 managed-node2 systemd[1]: libpod-2425fa9943693439077016bf27e4f1a3e49f74001aca0311ea36c109a2428748.scope: Consumed 35ms CPU time
Nov 05 18:17:58 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2425fa9943693439077016bf27e4f1a3e49f74001aca0311ea36c109a2428748-userdata-shm.mount: Succeeded.
Nov 05 18:17:58 managed-node2 podman[42818]: Pods stopped:
Nov 05 18:17:58 managed-node2 podman[42818]: 12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c
Nov 05 18:17:58 managed-node2 podman[42818]: Pods removed:
Nov 05 18:17:58 managed-node2 podman[42818]: 12b6dbcb195fe8f6cc5ae9b3bf4597d0f7aca115692cd7d9218640e6d163870c
Nov 05 18:17:58 managed-node2 podman[42818]: Secrets removed:
Nov 05 18:17:58 managed-node2 podman[42818]: Volumes removed:
Nov 05 18:17:58 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Succeeded.
Nov 05 18:17:58 managed-node2 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished shutting down.
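Note: after each pod is torn down, the same Go-template query used for the earlier "is it Running" assertions (18:17:27-18:17:28) can confirm the pod is gone; once removed, the inspect fails with a no-such-pod error instead of printing a state. Command verbatim from those checks:

    podman pod inspect httpd3 --format '{{.State}}'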
Nov 05 18:17:59 managed-node2 platform-python[43086]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:17:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay-6307bb7e3ce3528dabcc1bfac894c63f539f720180797c783d7e2ce807e840b9-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-6307bb7e3ce3528dabcc1bfac894c63f539f720180797c783d7e2ce807e840b9-merged.mount has successfully entered the 'dead' state. Nov 05 18:17:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Nov 05 18:17:59 managed-node2 platform-python[43211]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Nov 05 18:17:59 managed-node2 platform-python[43211]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml Nov 05 18:17:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Nov 05 18:18:00 managed-node2 platform-python[43347]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 05 18:18:01 managed-node2 platform-python[43470]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Nov 05 18:18:02 managed-node2 platform-python[43594]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 05 18:18:02 managed-node2 sudo[43719]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-brphqsgtovonfmkqttrcyvowzwzlsill ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848682.1875007-22366-132251678991341/AnsiballZ_podman_container_info.py' Nov 05 18:18:02 managed-node2 sudo[43719]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Nov 05 18:18:02 managed-node2 platform-python[43722]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Nov 05 18:18:02 managed-node2 systemd[24551]: Started podman-43724.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done.
Nov 05 18:18:02 managed-node2 sudo[43719]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:18:03 managed-node2 sudo[43853]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jjexkhpcudxynvulvobxgwnykxafzwiy ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848683.06536-22393-136965293295939/AnsiballZ_command.py'
Nov 05 18:18:03 managed-node2 sudo[43853]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:18:03 managed-node2 platform-python[43856]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:03 managed-node2 systemd[24551]: Started podman-43858.scope.
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done.
Nov 05 18:18:03 managed-node2 sudo[43853]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:18:03 managed-node2 sudo[44012]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-enzwrbkhswabzmxnwdialonmyrvmdavx ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848683.5707338-22412-248871829989233/AnsiballZ_command.py'
Nov 05 18:18:03 managed-node2 sudo[44012]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:18:03 managed-node2 platform-python[44015]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:03 managed-node2 systemd[24551]: Started podman-44017.scope.
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done.
Nov 05 18:18:03 managed-node2 sudo[44012]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:18:04 managed-node2 platform-python[44146]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None
Nov 05 18:18:04 managed-node2 systemd[1]: Stopping User Manager for UID 3001...
-- Subject: Unit user@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has begun shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopping D-Bus User Message Bus...
-- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped target Default.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopping podman-pause-1bcacabc.scope.
-- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Removed slice podman\x2dkube.slice.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped D-Bus User Message Bus.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped target Basic System.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped target Sockets.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped target Paths.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped target Timers.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped Mark boot as successful after the user session has run 2 minutes.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Closed D-Bus User Message Bus Socket.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Stopped podman-pause-1bcacabc.scope.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Removed slice user.slice.
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[24551]: Reached target Shutdown.
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done.
Nov 05 18:18:04 managed-node2 systemd[24551]: Started Exit the Session.
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done.
Nov 05 18:18:04 managed-node2 systemd[24551]: Reached target Exit the Session.
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done.
Nov 05 18:18:04 managed-node2 systemd[24555]: pam_unix(systemd-user:session): session closed for user podman_basic_user
Nov 05 18:18:04 managed-node2 systemd[1]: user@3001.service: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user@3001.service has successfully entered the 'dead' state.
Nov 05 18:18:04 managed-node2 systemd[1]: Stopped User Manager for UID 3001.
-- Subject: Unit user@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@3001.service has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[1]: Stopping User runtime directory /run/user/3001...
-- Subject: Unit user-runtime-dir@3001.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has begun shutting down.
Nov 05 18:18:04 managed-node2 systemd[1]: run-user-3001.mount: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-user-3001.mount has successfully entered the 'dead' state.
Nov 05 18:18:04 managed-node2 systemd[1]: user-runtime-dir@3001.service: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user-runtime-dir@3001.service has successfully entered the 'dead' state.
Nov 05 18:18:04 managed-node2 systemd[1]: Stopped User runtime directory /run/user/3001.
-- Subject: Unit user-runtime-dir@3001.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@3001.service has finished shutting down.
Nov 05 18:18:04 managed-node2 systemd[1]: Removed slice User Slice of UID 3001.
-- Subject: Unit user-3001.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-3001.slice has finished shutting down.
Nov 05 18:18:04 managed-node2 platform-python[44278]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:05 managed-node2 platform-python[44402]: ansible-ansible.legacy.systemd Invoked with name=systemd-logind state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Nov 05 18:18:05 managed-node2 systemd[1]: Stopping Login Service...
-- Subject: Unit systemd-logind.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-logind.service has begun shutting down.
Nov 05 18:18:05 managed-node2 systemd[1]: systemd-logind.service: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-logind.service has successfully entered the 'dead' state.
Nov 05 18:18:05 managed-node2 systemd[1]: Stopped Login Service.
-- Subject: Unit systemd-logind.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-logind.service has finished shutting down.
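What the journal above records is the effect of cancelling linger for podman_basic_user: once the last session is gone, systemd stops user@3001.service, unmounts /run/user/3001, and removes the user slice. Reconstructed from the module invocation logged above, the task driving this looks roughly like the sketch below (the role's real task in manage_linger.yml may differ in naming and conditionals); the removes= guard makes it a no-op once the linger file is already gone:

    # Sketch reconstructed from the logged invocation; not the role's verbatim task.
    - name: Cancel linger for the rootless test user
      ansible.builtin.command:
        cmd: loginctl disable-linger podman_basic_user
        # Skip the command entirely if the linger marker file no longer exists.
        removes: /var/lib/systemd/linger/podman_basic_user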
Nov 05 18:18:05 managed-node2 platform-python[44531]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:05 managed-node2 dbus-daemon[608]: [system] Activating via systemd: service name='org.freedesktop.login1' unit='dbus-org.freedesktop.login1.service' requested by ':1.200' (uid=0 pid=44533 comm="loginctl show-user --value -p State podman_basic_u" label="unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023")
Nov 05 18:18:05 managed-node2 systemd[1]: Starting Login Service...
-- Subject: Unit systemd-logind.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-logind.service has begun starting up.
Nov 05 18:18:06 managed-node2 systemd-logind[44534]: New seat seat0.
-- Subject: A new seat seat0 is now available -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new seat seat0 has been configured and is now available.
Nov 05 18:18:06 managed-node2 systemd-logind[44534]: Watching system buttons on /dev/input/event0 (Power Button)
Nov 05 18:18:06 managed-node2 systemd-logind[44534]: Watching system buttons on /dev/input/event1 (Sleep Button)
Nov 05 18:18:06 managed-node2 systemd-logind[44534]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard)
Nov 05 18:18:06 managed-node2 dbus-daemon[608]: [system] Successfully activated service 'org.freedesktop.login1'
Nov 05 18:18:06 managed-node2 systemd[1]: Started Login Service.
-- Subject: Unit systemd-logind.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-logind.service has finished starting up. -- -- The start-up result is done.
Nov 05 18:18:06 managed-node2 systemd-logind[44534]: New session 5 of user root.
-- Subject: A new session 5 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 5 has been created for the user root. -- -- The leading process of the session is 7104.
Nov 05 18:18:06 managed-node2 systemd-logind[44534]: New session 3 of user root.
-- Subject: A new session 3 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 3 has been created for the user root. -- -- The leading process of the session is 5113.
Nov 05 18:18:06 managed-node2 sudo[44656]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwsoytmuwasbwbcbuupqmxrjsgwcappj ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848686.42493-22551-215313737254565/AnsiballZ_command.py'
Nov 05 18:18:06 managed-node2 sudo[44656]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:18:06 managed-node2 platform-python[44659]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:06 managed-node2 sudo[44656]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:18:07 managed-node2 platform-python[44788]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd2 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:07 managed-node2 platform-python[44918]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd3 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:07 managed-node2 sudo[45049]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ejorgcgwcdcjwjzgtncydgdeeikknmna ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1730848687.7598839-22614-8564100397990/AnsiballZ_command.py'
Nov 05 18:18:07 managed-node2 sudo[45049]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Nov 05 18:18:08 managed-node2 platform-python[45052]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:08 managed-node2 sudo[45049]: pam_unix(sudo:session): session closed for user podman_basic_user
Nov 05 18:18:08 managed-node2 platform-python[45178]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:08 managed-node2 platform-python[45304]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:09 managed-node2 platform-python[45430]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:13 managed-node2 platform-python[45678]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:14 managed-node2 platform-python[45807]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Nov 05 18:18:15 managed-node2 platform-python[45931]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:19 managed-node2 platform-python[46056]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Nov 05 18:18:20 managed-node2 platform-python[46180]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:21 managed-node2 platform-python[46305]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:21 managed-node2 platform-python[46429]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:23 managed-node2 platform-python[46553]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:23 managed-node2 platform-python[46677]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:24 managed-node2 platform-python[46800]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:24 managed-node2 platform-python[46923]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:18:26 managed-node2 platform-python[47046]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Nov 05 18:18:26 managed-node2 platform-python[47170]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:28 managed-node2 platform-python[47295]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:29 managed-node2 platform-python[47419]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Nov 05 18:18:29 managed-node2 platform-python[47546]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:29 managed-node2 platform-python[47669]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:18:31 managed-node2 platform-python[47792]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:32 managed-node2 platform-python[47917]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:33 managed-node2 platform-python[48041]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Nov 05 18:18:33 managed-node2 platform-python[48168]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:33 managed-node2 platform-python[48291]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:18:34 managed-node2 platform-python[48414]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None
Nov 05 18:18:35 managed-node2 platform-python[48538]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:36 managed-node2 platform-python[48661]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:18:36 managed-node2 platform-python[48784]: ansible-file Invoked with path=/tmp/lsr_g8amvhe2_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:18:39 managed-node2 platform-python[48946]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Nov 05 18:18:40 managed-node2 platform-python[49102]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:40 managed-node2 platform-python[49225]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:43 managed-node2 platform-python[49473]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:44 managed-node2 platform-python[49602]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Nov 05 18:18:44 managed-node2 platform-python[49726]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:47 managed-node2 platform-python[49851]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 05 18:18:49 managed-node2 platform-python[49976]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:18:50 managed-node2 platform-python[50099]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Nov 05 18:18:50 managed-node2 platform-python[50198]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1730848729.9123697-24295-180984615223804/source dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 05 18:18:51 managed-node2 platform-python[50323]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Nov 05 18:18:51 managed-node2 systemd[1]: Reloading.
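A note on the unit names in the records above: the role derives each podman-kube unit name by escaping the kube YAML path with systemd-escape and substituting it into the podman-kube@.service template, so '/' becomes '-' and a literal '-' becomes \x2d, yielding names such as podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service. A minimal sketch of that step as an Ansible task (the __kube_unit variable name is hypothetical, not the role's):

    - name: Derive the podman-kube unit name from the kube file path
      ansible.builtin.command:
        argv:
          - systemd-escape
          - --template
          - podman-kube@.service
          - /etc/containers/ansible-kubernetes.d/httpd2.yml
      # stdout holds the escaped unit name shown in the journal above
      register: __kube_unit
      changed_when: false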
Nov 05 18:18:52 managed-node2 platform-python[50477]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Nov 05 18:18:52 managed-node2 platform-python[50603]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet systemctl list-unit-files | grep quadlet ls -alrtF /etc/containers/systemd /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck3846047098-merged.mount: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck3846047098-merged.mount has successfully entered the 'dead' state.
Nov 05 18:18:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Nov 05 18:18:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
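For orientation: /etc/containers/systemd/quadlet-pod-pod.pod, written a few records earlier, is a quadlet unit; at daemon-reload time the generator /usr/libexec/podman/quadlet turns a pod unit named NAME.pod into NAME-pod.service, which is why the role starts quadlet-pod-pod-pod.service here. The log does not show the file's content (it is copied from a template with content=NOT_LOGGING_PARAMETER); on a Podman new enough to support pod quadlets (5.0 or later), a minimal deployment task might look like this sketch, with an illustrative PodName value:

    - name: Install a minimal quadlet pod unit
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-pod-pod.pod
        owner: root
        group: "0"
        mode: "0644"
        # Illustrative body; the real file is rendered from the role's template.
        content: |
          [Pod]
          PodName=quadlet-pod

When the generated service fails to start, running the generator by hand with -dryrun -v -no-kmsg-log, as captured in the debug record above, prints the units it would generate or the reason it rejects one.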
Nov 05 18:18:53 managed-node2 platform-python[50804]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Nov 05 18:18:54 managed-node2 platform-python[50928]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

TASK [Cleanup user] ************************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:160
Tuesday 05 November 2024  18:18:54 -0500 (0:00:00.526)       0:00:16.107 ******
skipping: [managed-node2] => { "changed": false, "false_condition": false, "skip_reason": "Conditional result was False" }

TASK [Remove test user] ********************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:169
Tuesday 05 November 2024  18:18:54 -0500 (0:00:00.038)       0:00:16.145 ******
skipping: [managed-node2] => { "changed": false, "false_condition": false, "skip_reason": "Conditional result was False" }

TASK [Cleanup system - root] ***************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:174
Tuesday 05 November 2024  18:18:54 -0500 (0:00:00.037)       0:00:16.183 ******
skipping: [managed-node2] => { "changed": false, "false_condition": false, "skip_reason": "Conditional result was False" }

TASK [Ensure no resources] *****************************************************
task path: /tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:183
Tuesday 05 November 2024  18:18:54 -0500 (0:00:00.041)       0:00:16.224 ******
skipping: [managed-node2] => { "changed": false, "false_condition": false, "skip_reason": "Conditional result was False" }

PLAY RECAP *********************************************************************
managed-node2 : ok=40 changed=1 unreachable=0 failed=1 skipped=58 rescued=1 ignored=0

Tuesday 05 November 2024  18:18:54 -0500 (0:00:00.020)       0:00:16.245 ******
===============================================================================
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.74s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Gathering Facts --------------------------------------------------------- 1.19s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Debug3 ------------------------------------------------------------------ 1.10s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:127
fedora.linux_system_roles.podman : Reload systemctl --------------------- 1.06s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.97s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
fedora.linux_system_roles.podman : Ensure the quadlet directory is present --- 0.65s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
fedora.linux_system_roles.podman : Start service ------------------------ 0.62s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Dump journal ------------------------------------------------------------ 0.53s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:151
fedora.linux_system_roles.podman : Get podman version ------------------- 0.51s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
fedora.linux_system_roles.podman : Get user information ----------------- 0.49s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Check AVCs -------------------------------------------------------------- 0.48s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:146
fedora.linux_system_roles.podman : Check if system is ostree ------------ 0.48s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
fedora.linux_system_roles.podman : See if getsubids exists -------------- 0.43s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
fedora.linux_system_roles.podman : See if getsubids exists -------------- 0.41s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin --- 0.36s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
fedora.linux_system_roles.podman : Mark user for possible linger cancel --- 0.24s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
fedora.linux_system_roles.podman : Set per-container variables part 5 --- 0.12s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
fedora.linux_system_roles.podman : Create and update quadlets ----------- 0.12s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
fedora.linux_system_roles.podman : Manage linger ------------------------ 0.11s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
fedora.linux_system_roles.podman : Handle container.conf.d -------------- 0.11s
/tmp/collections-ea9/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124